diff --git a/loras/code-switching-corrupted/en-de/adapter_config.json b/loras/code-switching-corrupted/en-de/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/code-switching-corrupted/en-de/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/code-switching-corrupted/en-de/head_config.json b/loras/code-switching-corrupted/en-de/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/code-switching-corrupted/en-de/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/code-switching-corrupted/en-de/pytorch_adapter.bin 
b/loras/code-switching-corrupted/en-de/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..02d67286b62590a1c83c0054e8d54512aee9c240 --- /dev/null +++ b/loras/code-switching-corrupted/en-de/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5b620218902b1fa164691ada5439b5c6488c5b56357fee44a8a7fdc421b49c68 +size 5333085 diff --git a/loras/code-switching-corrupted/en-de/pytorch_model_head.bin b/loras/code-switching-corrupted/en-de/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..45e3b0b8767c2eb3144de44ce9d0297391d7b202 --- /dev/null +++ b/loras/code-switching-corrupted/en-de/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6fc32395f38c2d640ac521d6ef506f81adb306c9ad295e90318cca23335b1bce +size 342547 diff --git a/loras/code-switching-corrupted/es-en/adapter_config.json b/loras/code-switching-corrupted/es-en/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/code-switching-corrupted/es-en/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/code-switching-corrupted/es-en/head_config.json b/loras/code-switching-corrupted/es-en/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/code-switching-corrupted/es-en/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 
70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/code-switching-corrupted/es-en/pytorch_adapter.bin b/loras/code-switching-corrupted/es-en/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..755ec1d07261d548d08f1b57597e2375f7df1940 --- /dev/null +++ b/loras/code-switching-corrupted/es-en/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:08540d5fabb344ba2eb3d516e4a087aa16e773c3ad09cae572abdb51c2bb3a90 +size 5333085 diff --git a/loras/code-switching-corrupted/es-en/pytorch_model_head.bin b/loras/code-switching-corrupted/es-en/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..f58d8933f8f51957703e3df06458c6b64574a95b --- /dev/null +++ b/loras/code-switching-corrupted/es-en/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3afefaec2a42ac26b95fcf21f246a4ffacba0819a5f1cddaf9dd7c4acb9e6268 +size 342547 diff --git a/loras/code-switching-corrupted/tr-de/adapter_config.json b/loras/code-switching-corrupted/tr-de/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/code-switching-corrupted/tr-de/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/code-switching-corrupted/tr-de/head_config.json b/loras/code-switching-corrupted/tr-de/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/code-switching-corrupted/tr-de/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + 
"LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/code-switching-corrupted/tr-de/pytorch_adapter.bin b/loras/code-switching-corrupted/tr-de/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..605f4cf60f3cb02961eb7cde003f157a5acb6e5e --- /dev/null +++ b/loras/code-switching-corrupted/tr-de/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:290d14a319427b5039300c53d1e200517c8274b38a5397d9e8b6b2d661fb7e50 +size 5333085 diff --git a/loras/code-switching-corrupted/tr-de/pytorch_model_head.bin b/loras/code-switching-corrupted/tr-de/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..a74eff5daa91e482bc7bfb1034098e373e4d65c5 --- /dev/null +++ b/loras/code-switching-corrupted/tr-de/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f2fb3af8ff4c79fad404eb57d37d517f5216ae3b71613418ff58ce5c1449dc20 +size 342547 diff --git a/loras/code-switching-corrupted/vi-en/adapter_config.json b/loras/code-switching-corrupted/vi-en/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/code-switching-corrupted/vi-en/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/code-switching-corrupted/vi-en/head_config.json b/loras/code-switching-corrupted/vi-en/head_config.json new file 
mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/code-switching-corrupted/vi-en/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/code-switching-corrupted/vi-en/pytorch_adapter.bin b/loras/code-switching-corrupted/vi-en/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..0d320b3437d3adce2dace7c6abd14aaf035024cc --- /dev/null +++ b/loras/code-switching-corrupted/vi-en/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1022dbf5f000ed886bc002926da2ea0ace624e1458c1146c0e120581356721b6 +size 5333085 diff --git a/loras/code-switching-corrupted/vi-en/pytorch_model_head.bin b/loras/code-switching-corrupted/vi-en/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..e1fad438497c247f647c47ba259bc39bfdb02fbf --- /dev/null +++ b/loras/code-switching-corrupted/vi-en/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7c187f13b2e109719f4a194de3d138106013e1a2ad73c5eee9cc74505b7e8098 +size 342547 diff --git a/loras/code-switching/en-de/adapter_config.json b/loras/code-switching/en-de/adapter_config.json new file mode 100644 index 
0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/code-switching/en-de/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/code-switching/en-de/head_config.json b/loras/code-switching/en-de/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/code-switching/en-de/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/code-switching/en-de/pytorch_adapter.bin b/loras/code-switching/en-de/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..32bbac66687e200722f6e2fe371c35c8d71cfc9c --- /dev/null +++ b/loras/code-switching/en-de/pytorch_adapter.bin @@ 
-0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:90aad07acbcaa42d91e30bebb00ef056e6f7b9816b34a9c31fd606f776db14bf +size 5333085 diff --git a/loras/code-switching/en-de/pytorch_model_head.bin b/loras/code-switching/en-de/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..932087087508f8fd1621a354ac7b5b8a1e47f704 --- /dev/null +++ b/loras/code-switching/en-de/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d4d50f6c03ef14de4cb639aa7bf26a1cfb3be9fe21f4fe3e33c879e2ac2a93a5 +size 342547 diff --git a/loras/code-switching/es-en/adapter_config.json b/loras/code-switching/es-en/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/code-switching/es-en/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/code-switching/es-en/head_config.json b/loras/code-switching/es-en/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/code-switching/es-en/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 
88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/code-switching/es-en/pytorch_adapter.bin b/loras/code-switching/es-en/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..cdc8a0cdfe6083f9b411f2a399442d01519d1d74 --- /dev/null +++ b/loras/code-switching/es-en/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3f7d3d667315c9823b036186338a7bbc3daf91adde1982072db5e83488e2451b +size 5333085 diff --git a/loras/code-switching/es-en/pytorch_model_head.bin b/loras/code-switching/es-en/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..e9f3a9e7149951d2b35011491f76d0b22eee38cd --- /dev/null +++ b/loras/code-switching/es-en/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e331d45655ec68902fd884054ff929f01325111ffcde91a09401d5934c2a578e +size 342547 diff --git a/loras/code-switching/tr-de/adapter_config.json b/loras/code-switching/tr-de/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/code-switching/tr-de/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/code-switching/tr-de/head_config.json b/loras/code-switching/tr-de/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/code-switching/tr-de/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + 
"LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/code-switching/tr-de/pytorch_adapter.bin b/loras/code-switching/tr-de/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..f16bb7bb8cec74c5fb3e4cf25c31cbb1b9747c35 --- /dev/null +++ b/loras/code-switching/tr-de/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cb8d06dc37cb23bd3d802425073a43d57e7053e170f0625d2136703473094a94 +size 5333085 diff --git a/loras/code-switching/tr-de/pytorch_model_head.bin b/loras/code-switching/tr-de/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..1a0434813a68f8eeb1a7e7c2bef7b1f5850767df --- /dev/null +++ b/loras/code-switching/tr-de/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9e0fd2ee1960a5df5850a53f33725673a1aface5d671bdb6bc0665ef7d442207 +size 342547 diff --git a/loras/code-switching/vi-en/adapter_config.json b/loras/code-switching/vi-en/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/code-switching/vi-en/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/code-switching/vi-en/head_config.json b/loras/code-switching/vi-en/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/code-switching/vi-en/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + 
"LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/code-switching/vi-en/pytorch_adapter.bin b/loras/code-switching/vi-en/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..cb74830fdfb02d85353ef3844272a5f9978c4872 --- /dev/null +++ b/loras/code-switching/vi-en/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0ddbce3e7fced8d542c401a986b4bfc393190780c2d7e1fd9458b0559131a210 +size 5333085 diff --git a/loras/code-switching/vi-en/pytorch_model_head.bin b/loras/code-switching/vi-en/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..487b235615a07c7574e1dea0a593f588e4c0d526 --- /dev/null +++ b/loras/code-switching/vi-en/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:609e6dc23b768c26aee5bcae157b23c460a650ab2888601a34464284b813415f +size 342547 diff --git a/loras/ersatz/ar/adapter_config.json b/loras/ersatz/ar/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ersatz/ar/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline 
at end of file diff --git a/loras/ersatz/ar/head_config.json b/loras/ersatz/ar/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ersatz/ar/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/ar/pytorch_adapter.bin b/loras/ersatz/ar/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..7d6111f05a18f339c04fad22925e86ef9ad121ed --- /dev/null +++ b/loras/ersatz/ar/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2780b081cc92009672cc0990b2e36cd84df48f7a20c72a057427af284a2c19bd +size 5333085 diff --git a/loras/ersatz/ar/pytorch_model_head.bin b/loras/ersatz/ar/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..facec1954cce3725770f853c23395c9545a33ed2 --- /dev/null +++ b/loras/ersatz/ar/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7d45c3faf06cd8f3a8cf75d1fd97d362bfec9f7211de89193f99a94dba23a09d +size 342547 diff --git a/loras/ersatz/cs/adapter_config.json b/loras/ersatz/cs/adapter_config.json new file mode 100644 index 
0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ersatz/cs/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/cs/head_config.json b/loras/ersatz/cs/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ersatz/cs/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/cs/pytorch_adapter.bin b/loras/ersatz/cs/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..821a76923c1c5d314b3e6beec54015eebe9eb51a --- /dev/null +++ b/loras/ersatz/cs/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:7db7248853dcfe99d715b87dbfb5c674ea4e6e2b719dd456ebe2607002ff80c4 +size 5333085 diff --git a/loras/ersatz/cs/pytorch_model_head.bin b/loras/ersatz/cs/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..f75f31647881e09c4c2c0d716990bbe8765f27e6 --- /dev/null +++ b/loras/ersatz/cs/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b9b49dc34c5bca6f9d101edf5482c7664d8bdb3060d7eb43b7390f3ff95199be +size 342547 diff --git a/loras/ersatz/de/adapter_config.json b/loras/ersatz/de/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ersatz/de/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/de/head_config.json b/loras/ersatz/de/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ersatz/de/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 
96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/de/pytorch_adapter.bin b/loras/ersatz/de/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..22e61f0d4d68bee4b771fab3e8fa4e9c53ebc242 --- /dev/null +++ b/loras/ersatz/de/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:44b77c82bb1b543e5135fb77005b1e826491124e06951854520fe219e467256f +size 5333085 diff --git a/loras/ersatz/de/pytorch_model_head.bin b/loras/ersatz/de/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..b93254c871a188db86ef0cb49a8f8a6e26622448 --- /dev/null +++ b/loras/ersatz/de/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:657a818191418f29edf3490fb0d7ece3169d2a4c47a3de400aeba8cfd2aaceb1 +size 342547 diff --git a/loras/ersatz/en/adapter_config.json b/loras/ersatz/en/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ersatz/en/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/en/head_config.json b/loras/ersatz/en/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ersatz/en/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + 
"LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/en/pytorch_adapter.bin b/loras/ersatz/en/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..040ad965860408de02c7de5a09df7f4bfef9b626 --- /dev/null +++ b/loras/ersatz/en/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:892f0c1e8b95fecaacd253c475c669d0b69cd248e7d345bca0fe3255d1b9f4c4 +size 5333085 diff --git a/loras/ersatz/en/pytorch_model_head.bin b/loras/ersatz/en/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..c5d44795d55503d5939f5ab96fc24fa6531444c4 --- /dev/null +++ b/loras/ersatz/en/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9f12788e66bea019ea29286379ad9c2f7ac29207c50c3447c053d194dee22d9d +size 342547 diff --git a/loras/ersatz/et/adapter_config.json b/loras/ersatz/et/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ersatz/et/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/et/head_config.json b/loras/ersatz/et/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ersatz/et/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 
35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/et/pytorch_adapter.bin b/loras/ersatz/et/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..42b09545f9188a59a2dfbf6a3aa8e60647aba157 --- /dev/null +++ b/loras/ersatz/et/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6cbe606be55fb130a9ffb218a4e645d91725f3f85b4e4df352adb88c65235e3d +size 5333085 diff --git a/loras/ersatz/et/pytorch_model_head.bin b/loras/ersatz/et/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..0f84d4b88924f14f07682a6384c4cc846b853cf9 --- /dev/null +++ b/loras/ersatz/et/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9acf09be942915de7789002fff98cd3b8b9f53bc2e79975edc870c32a0e38f14 +size 342547 diff --git a/loras/ersatz/fi/adapter_config.json b/loras/ersatz/fi/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ersatz/fi/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/fi/head_config.json b/loras/ersatz/fi/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ersatz/fi/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + 
"LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/fi/pytorch_adapter.bin b/loras/ersatz/fi/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..c45adcda92a3d93175906af7ec0726658fc13eb9 --- /dev/null +++ b/loras/ersatz/fi/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:39ca019bd2bb9ec72e975b41f0d95efcb941fbb5307e6e64b8264477f6e93de2 +size 5333085 diff --git a/loras/ersatz/fi/pytorch_model_head.bin b/loras/ersatz/fi/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..70f22e94fb7ba7beaca97f0a9fcd22451072dc5b --- /dev/null +++ b/loras/ersatz/fi/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c631b736f243bd953a1245e43085b768b94d9ca2a607eaf1f4b43d0cad4efcd2 +size 342547 diff --git a/loras/ersatz/fr/adapter_config.json b/loras/ersatz/fr/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ersatz/fr/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": 
"xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/fr/head_config.json b/loras/ersatz/fr/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ersatz/fr/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/fr/pytorch_adapter.bin b/loras/ersatz/fr/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..7881913abf162b2269d5dab836423ab85578d062 --- /dev/null +++ b/loras/ersatz/fr/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9e3e9906814cf40ffe66cab8be98a3df51ad4c80b4258c705faaed3319ca6da8 +size 5333085 diff --git a/loras/ersatz/fr/pytorch_model_head.bin b/loras/ersatz/fr/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..464e67027ac36801297d25d8173f5b580705bd8b --- /dev/null +++ b/loras/ersatz/fr/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:938c2eb8705eb1245121f293a19fac41b95a7ad423bd4e7159de77825e13b9f7 +size 342547 diff --git a/loras/ersatz/gu/adapter_config.json 
b/loras/ersatz/gu/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ersatz/gu/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/gu/head_config.json b/loras/ersatz/gu/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ersatz/gu/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/gu/pytorch_adapter.bin b/loras/ersatz/gu/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..e123f781780fdee0f232e4ade3d870634278d597 --- /dev/null +++ b/loras/ersatz/gu/pytorch_adapter.bin @@ -0,0 +1,3 @@ 
+version https://git-lfs.github.com/spec/v1 +oid sha256:11e6276e460c8a77b6a84c9073d4ca656768176292e651cfec1ee743bc0d8f65 +size 5333085 diff --git a/loras/ersatz/gu/pytorch_model_head.bin b/loras/ersatz/gu/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..fc0ad09012566c1e1c126366611d6fb7e72ef614 --- /dev/null +++ b/loras/ersatz/gu/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fa57267161d84b0aec2e0ca814001a9db97b8f9005eca0479e2edcc818c6c637 +size 342547 diff --git a/loras/ersatz/hi/adapter_config.json b/loras/ersatz/hi/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ersatz/hi/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/hi/head_config.json b/loras/ersatz/hi/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ersatz/hi/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + 
"LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/hi/pytorch_adapter.bin b/loras/ersatz/hi/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..69355d9389b9c348d993fe486375f4747d35bd8d --- /dev/null +++ b/loras/ersatz/hi/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f5baa98a1295cfb682c2c460d2cdf66ca66f995222123b9c3dc9045650258ef8 +size 5333085 diff --git a/loras/ersatz/hi/pytorch_model_head.bin b/loras/ersatz/hi/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..a95673c8523913339c087448d07177838390aa03 --- /dev/null +++ b/loras/ersatz/hi/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:640cd039eacb2f62f9584de96fc494f6bccef587d6530ad21cc5c75dd1cba72a +size 342547 diff --git a/loras/ersatz/ja/adapter_config.json b/loras/ersatz/ja/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ersatz/ja/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/ja/head_config.json b/loras/ersatz/ja/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ersatz/ja/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 
63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/ja/pytorch_adapter.bin b/loras/ersatz/ja/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..5d619680c64af1647914f780cb3d758a966088ba --- /dev/null +++ b/loras/ersatz/ja/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8474620733b6516b685a8c3e960cc43bd37115d3f4bd9deb47c4145d1e38e7bd +size 5333085 diff --git a/loras/ersatz/ja/pytorch_model_head.bin b/loras/ersatz/ja/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..560f548989ba5384ac6476ef7deba7abb2fe580b --- /dev/null +++ b/loras/ersatz/ja/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5a64937d757f841e6d15ae48ff27975d9e7b468a3af665cf7412f1574e5b72c0 +size 342547 diff --git a/loras/ersatz/kk/adapter_config.json b/loras/ersatz/kk/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ersatz/kk/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/kk/head_config.json b/loras/ersatz/kk/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ersatz/kk/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + 
"LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/kk/pytorch_adapter.bin b/loras/ersatz/kk/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..fb951a5e0e68da906228c280e5be4d2d4c5b88b8 --- /dev/null +++ b/loras/ersatz/kk/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d2727f7a861a679a489998f5bc355239089acf2ecc8569557c9e9a95902e4059 +size 5333085 diff --git a/loras/ersatz/kk/pytorch_model_head.bin b/loras/ersatz/kk/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..0d791ff2d8c53b8b1f340a8d1074bd8ec7d47177 --- /dev/null +++ b/loras/ersatz/kk/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1db7b4036bce080e740893fee69bad4607710090fdc830f7212e601e68b9e994 +size 342547 diff --git a/loras/ersatz/km/adapter_config.json b/loras/ersatz/km/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ersatz/km/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/km/head_config.json b/loras/ersatz/km/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ersatz/km/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 
102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/km/pytorch_adapter.bin b/loras/ersatz/km/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..981540a62cea2f1d9e5c9a5b6882e274fa316c1f --- /dev/null +++ b/loras/ersatz/km/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:11951d7772d4a703aad1e935fa888cca8f1d74e65bd1fb6aa895e6c715595036 +size 5333085 diff --git a/loras/ersatz/km/pytorch_model_head.bin b/loras/ersatz/km/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..3d1e14e73ffb95add685dc229d07cb3bbf33e58a --- /dev/null +++ b/loras/ersatz/km/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1a89432df782e2c68c6eb99ca3c0f7f00077034c7703761e99c74ea37deb3b51 +size 342547 diff --git a/loras/ersatz/lt/adapter_config.json b/loras/ersatz/lt/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ersatz/lt/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": 
"SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/lt/head_config.json b/loras/ersatz/lt/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ersatz/lt/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/lt/pytorch_adapter.bin b/loras/ersatz/lt/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..96ca104cb5e96cbbac5a47941102596e48a0efe6 --- /dev/null +++ b/loras/ersatz/lt/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:14a6e3a0f15f19b16e59177e69bff5c2be8259133f12e41b4defbc4727013dfa +size 5333085 diff --git a/loras/ersatz/lt/pytorch_model_head.bin b/loras/ersatz/lt/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..b77bc3c0a7b0065e7d6bd5c28a9080f47419311f --- /dev/null +++ b/loras/ersatz/lt/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5870d0d9748662423151ae7f66745d386274164faccceb8432218eeb573bc223 +size 342547 diff --git 
a/loras/ersatz/lv/adapter_config.json b/loras/ersatz/lv/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ersatz/lv/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/lv/head_config.json b/loras/ersatz/lv/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ersatz/lv/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/lv/pytorch_adapter.bin b/loras/ersatz/lv/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..6c284225032c6b1681c4f82ebb0fcb4141e85032 --- /dev/null +++ 
b/loras/ersatz/lv/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:31475834a4ac1121fc0f349cb80180f4e176bdea55b180a7717200b11e510c7a +size 5333085 diff --git a/loras/ersatz/lv/pytorch_model_head.bin b/loras/ersatz/lv/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..6b116dc3f579033754d55796b39b123084f51608 --- /dev/null +++ b/loras/ersatz/lv/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dfd90ab506ba4dc01a74fa4b703ea63223421e5b80cf244e671ca6b1213a15ad +size 342547 diff --git a/loras/ersatz/pl/adapter_config.json b/loras/ersatz/pl/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ersatz/pl/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/pl/head_config.json b/loras/ersatz/pl/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ersatz/pl/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + 
"LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/pl/pytorch_adapter.bin b/loras/ersatz/pl/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..9aee1bd95fbf035c8d995a77edc8b6c27363405f --- /dev/null +++ b/loras/ersatz/pl/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1505d915e1381cd56146b2fef4402389cee77f43ac43e85275a2281c09f904c0 +size 5333085 diff --git a/loras/ersatz/pl/pytorch_model_head.bin b/loras/ersatz/pl/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..07c35fbcf396359725bd851fe74e8e812ef60467 --- /dev/null +++ b/loras/ersatz/pl/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d6042ba3ebee018f23b02f1243e029e7515863b704b58d486ef2d482c4a38fc1 +size 342547 diff --git a/loras/ersatz/ps/adapter_config.json b/loras/ersatz/ps/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ersatz/ps/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/ps/head_config.json b/loras/ersatz/ps/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ersatz/ps/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 
60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/ps/pytorch_adapter.bin b/loras/ersatz/ps/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..f8e84f4f6b02bcbb6e2b764c906c47c6d05b8edf --- /dev/null +++ b/loras/ersatz/ps/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:792fdfbb3dd24aa162c3dc939724f006a3dedb739d7d2ac050e2e6e0a35bdd9f +size 5333085 diff --git a/loras/ersatz/ps/pytorch_model_head.bin b/loras/ersatz/ps/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..394bede2750f21c38e7cfe2ca5a7478ac40f73c0 --- /dev/null +++ b/loras/ersatz/ps/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6be6df812c69d9d32dda8aeafb749c43e450c4b16dfd3a7496d41eed42eede48 +size 342547 diff --git a/loras/ersatz/ro/adapter_config.json b/loras/ersatz/ro/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ersatz/ro/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/ro/head_config.json b/loras/ersatz/ro/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ersatz/ro/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + 
"LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/ro/pytorch_adapter.bin b/loras/ersatz/ro/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..1bacb0f605e3e2cd68f10612cae26b7cc4bb30e4 --- /dev/null +++ b/loras/ersatz/ro/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:565e59ebfe991815cc8d72d85d294dc129bded7e66bfafc0dfc33ab38faa03ae +size 5333085 diff --git a/loras/ersatz/ro/pytorch_model_head.bin b/loras/ersatz/ro/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..c59b6d48b701fad48ebdfa1aca180359b7274c3f --- /dev/null +++ b/loras/ersatz/ro/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7fe618da5e78af855d184a3ca9e87f02b335680577c0cc25858265c2d42f5716 +size 342547 diff --git a/loras/ersatz/ru/adapter_config.json b/loras/ersatz/ru/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ersatz/ru/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/ru/head_config.json b/loras/ersatz/ru/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ersatz/ru/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + 
"LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/ru/pytorch_adapter.bin b/loras/ersatz/ru/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..fe4ddf4c646a527293ce85f04eb75adaadd1ba7f --- /dev/null +++ b/loras/ersatz/ru/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9ec252da0a8a1d4e83bc1a3f0fc8d788681da7b1b816c8a42459371b6a144819 +size 5333085 diff --git a/loras/ersatz/ru/pytorch_model_head.bin b/loras/ersatz/ru/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..d6db6d12e36ecae3a7544b83b2e467d0723ebe0a --- /dev/null +++ b/loras/ersatz/ru/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d21ef28bc0f73fe178f4b12d3f49817fca7e476bfe20efd204971d160e811f6e +size 342547 diff --git a/loras/ersatz/ta/adapter_config.json b/loras/ersatz/ta/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ersatz/ta/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + 
}, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/ta/head_config.json b/loras/ersatz/ta/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ersatz/ta/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/ta/pytorch_adapter.bin b/loras/ersatz/ta/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..710331b83fef1cf21b59ea41d124f12d12224423 --- /dev/null +++ b/loras/ersatz/ta/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a4b91a6bc255c8b0a4e8ed1ad190160bfeff71aa7cfc9b8fcc4419401356f712 +size 5333085 diff --git a/loras/ersatz/ta/pytorch_model_head.bin b/loras/ersatz/ta/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..ab37d758b14840a7d6b6a5b02f8efb7ff435c010 --- /dev/null +++ b/loras/ersatz/ta/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:b3efbf41ede6f829c8e5ac613581091559a45a4973206f418778879ea3ffc588 +size 342547 diff --git a/loras/ersatz/tr/adapter_config.json b/loras/ersatz/tr/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ersatz/tr/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/tr/head_config.json b/loras/ersatz/tr/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ersatz/tr/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/tr/pytorch_adapter.bin b/loras/ersatz/tr/pytorch_adapter.bin new file mode 100644 index 
0000000000000000000000000000000000000000..daac49b98c88d49375969e1e28d5095ca29070f2 --- /dev/null +++ b/loras/ersatz/tr/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:862777b0b116ca16ea1d989aebdfdce3341d6bd101997c9545a81e64b975efdb +size 5333085 diff --git a/loras/ersatz/tr/pytorch_model_head.bin b/loras/ersatz/tr/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..451d76a49060e39ed9cb70726f9ca26de308a5e7 --- /dev/null +++ b/loras/ersatz/tr/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8b8fbb157774cf9e4fcf7e90643d19fef9305501ab3eeea49c1a455490086334 +size 342547 diff --git a/loras/ersatz/zh/adapter_config.json b/loras/ersatz/zh/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ersatz/zh/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/zh/head_config.json b/loras/ersatz/zh/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ersatz/zh/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + 
"LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ersatz/zh/pytorch_adapter.bin b/loras/ersatz/zh/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..9c165447a5b5ce4bcc1c05c9ed9a3d3e077c9c58 --- /dev/null +++ b/loras/ersatz/zh/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:41cc8c530527f3932ce372530a61b1a24f914b764fa7fbbd445329c4672c45f9 +size 5333085 diff --git a/loras/ersatz/zh/pytorch_model_head.bin b/loras/ersatz/zh/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..9a6f67688591720ef81bef22d107ad5c197ad90b --- /dev/null +++ b/loras/ersatz/zh/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ab0250d8cf060d715924371816409020790bf2075ff850e4424fec70281e57c3 +size 342547 diff --git a/loras/legal-judgements-corrupted/de/adapter_config.json b/loras/legal-judgements-corrupted/de/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/legal-judgements-corrupted/de/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements-corrupted/de/head_config.json b/loras/legal-judgements-corrupted/de/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/legal-judgements-corrupted/de/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, 
+ "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements-corrupted/de/pytorch_adapter.bin b/loras/legal-judgements-corrupted/de/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..c2110357c9d4ad1094b2e5f8a277729a0aeca46d --- /dev/null +++ b/loras/legal-judgements-corrupted/de/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c29da685193b7c5deddc5016ca94fbb890c27f59347145662f43060ede93b11a +size 5333085 diff --git a/loras/legal-judgements-corrupted/de/pytorch_model_head.bin b/loras/legal-judgements-corrupted/de/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..ff4a4c5b66cae599efc89d18fb68c55c2ac7a327 --- /dev/null +++ b/loras/legal-judgements-corrupted/de/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fa9e5aea58435a7a05d9f4c6548279d916a7080c5b0292d06b813c996fa779d7 +size 342547 diff --git a/loras/legal-judgements-corrupted/en/adapter_config.json b/loras/legal-judgements-corrupted/en/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/legal-judgements-corrupted/en/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements-corrupted/en/head_config.json b/loras/legal-judgements-corrupted/en/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/legal-judgements-corrupted/en/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 
106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements-corrupted/en/pytorch_adapter.bin b/loras/legal-judgements-corrupted/en/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..431dcd5d239200c0a52396f2f6d1cac9bb0d89f3 --- /dev/null +++ b/loras/legal-judgements-corrupted/en/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:12b96e8827da3f29cd7bf22666610bdf2c6b1004af0606520c9f4553e7f39e34 +size 5333085 diff --git a/loras/legal-judgements-corrupted/en/pytorch_model_head.bin b/loras/legal-judgements-corrupted/en/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..94f844e352679462c841b77baf47e3df193c24fd --- /dev/null +++ b/loras/legal-judgements-corrupted/en/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ab9f5fc94bf8034a859ecb71fd8352cc6480e610af67239f6e100802d771c530 +size 342547 diff --git a/loras/legal-judgements-corrupted/es/adapter_config.json b/loras/legal-judgements-corrupted/es/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/legal-judgements-corrupted/es/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + 
"selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements-corrupted/es/head_config.json b/loras/legal-judgements-corrupted/es/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/legal-judgements-corrupted/es/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements-corrupted/es/pytorch_adapter.bin b/loras/legal-judgements-corrupted/es/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..0b364a2ef2c6b138c15c918f42f6c3b3db431fcb --- /dev/null +++ b/loras/legal-judgements-corrupted/es/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9b15febc4447d6202de6c969cb819ddf84fffabdaeb0aad4482b0da3c03b6232 +size 5333085 diff --git a/loras/legal-judgements-corrupted/es/pytorch_model_head.bin b/loras/legal-judgements-corrupted/es/pytorch_model_head.bin new file mode 100644 index 
0000000000000000000000000000000000000000..ed802c9f8f7531fe01c1c3954ad9567c35a37750 --- /dev/null +++ b/loras/legal-judgements-corrupted/es/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8a83c97b75bbed05cbe2ff6431062a3dda5c737f4fd6eb2da67e29b0bb8f1858 +size 342547 diff --git a/loras/legal-judgements-corrupted/fr/adapter_config.json b/loras/legal-judgements-corrupted/fr/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/legal-judgements-corrupted/fr/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements-corrupted/fr/head_config.json b/loras/legal-judgements-corrupted/fr/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/legal-judgements-corrupted/fr/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + 
"model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements-corrupted/fr/pytorch_adapter.bin b/loras/legal-judgements-corrupted/fr/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..c1cb7912b74104b300aa1519d7076694c18174c5 --- /dev/null +++ b/loras/legal-judgements-corrupted/fr/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cc0950cbf9927956ddfe3b3525925c84de420e6fdd9c377d1245bb595a48e84f +size 5333085 diff --git a/loras/legal-judgements-corrupted/fr/pytorch_model_head.bin b/loras/legal-judgements-corrupted/fr/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..2aea61825bf1e8793c88342783b60b0763a051a9 --- /dev/null +++ b/loras/legal-judgements-corrupted/fr/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2304425b49f6749eeba11122cee86e781765539c49f286274ec3778017200182 +size 342547 diff --git a/loras/legal-judgements-corrupted/it/adapter_config.json b/loras/legal-judgements-corrupted/it/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/legal-judgements-corrupted/it/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements-corrupted/it/head_config.json b/loras/legal-judgements-corrupted/it/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/legal-judgements-corrupted/it/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + 
"LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements-corrupted/it/pytorch_adapter.bin b/loras/legal-judgements-corrupted/it/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..f8cbfdcfcf4d3c90c522d27c916eef7122627de7 --- /dev/null +++ b/loras/legal-judgements-corrupted/it/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b46daabe1989ca87bd4888d1c87ddfd4d1dd20495a8611b86d0369b0f10fc54f +size 5333085 diff --git a/loras/legal-judgements-corrupted/it/pytorch_model_head.bin b/loras/legal-judgements-corrupted/it/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..ffd95cd6affa338b3eddbb34b01759f1558d175d --- /dev/null +++ b/loras/legal-judgements-corrupted/it/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d2bbcf32a78d452eee9eded20fa062f3ed5ac756f607e97d9ae9415839b60cde +size 342547 diff --git a/loras/legal-judgements-corrupted/pt/adapter_config.json b/loras/legal-judgements-corrupted/pt/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/legal-judgements-corrupted/pt/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements-corrupted/pt/head_config.json b/loras/legal-judgements-corrupted/pt/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/legal-judgements-corrupted/pt/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + 
"LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements-corrupted/pt/pytorch_adapter.bin b/loras/legal-judgements-corrupted/pt/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..2eeb38d4f34bdaa22ef70b254046774bce73bfd5 --- /dev/null +++ b/loras/legal-judgements-corrupted/pt/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6db459f221e88777d8745ffcfa2d408973a740b7d400cfa05033039d653f88b4 +size 5333085 diff --git a/loras/legal-judgements-corrupted/pt/pytorch_model_head.bin b/loras/legal-judgements-corrupted/pt/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..bea6f7cedf44f7eab5e1d917080abac1d6589821 --- /dev/null +++ b/loras/legal-judgements-corrupted/pt/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:be2f7e7ed5e4987b3d9eddfff1c30536633f0c9edf211b2b872ffce23553d395 +size 342547 diff --git a/loras/legal-judgements/de/adapter_config.json b/loras/legal-judgements/de/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/legal-judgements/de/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + 
"model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements/de/head_config.json b/loras/legal-judgements/de/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/legal-judgements/de/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements/de/pytorch_adapter.bin b/loras/legal-judgements/de/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..12bcd53b2eb74c2dd8a420798168f858f5be5582 --- /dev/null +++ b/loras/legal-judgements/de/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8134c21093e741feeb457bbe24da471c60a7c1d0e68671ae53d0c0b6a2d35e3c +size 5333085 diff --git a/loras/legal-judgements/de/pytorch_model_head.bin b/loras/legal-judgements/de/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..eda6114187066e6ee19e7ada831f1ffc406ea8ab --- /dev/null +++ b/loras/legal-judgements/de/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ada8e4aaaac62f2643c3d7e50ca227f8b3554d29636bf9c167722344f716774a +size 342547 diff --git 
a/loras/legal-judgements/en/adapter_config.json b/loras/legal-judgements/en/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/legal-judgements/en/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements/en/head_config.json b/loras/legal-judgements/en/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/legal-judgements/en/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements/en/pytorch_adapter.bin b/loras/legal-judgements/en/pytorch_adapter.bin new file mode 100644 index 
0000000000000000000000000000000000000000..cf9fc7977f15b1874513d021b278d7b34770ad53 --- /dev/null +++ b/loras/legal-judgements/en/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1d6d14f93acdc31e1ef7cbed1f30e0c405f6bdee8ed7f79493198312c7b9d37d +size 5333085 diff --git a/loras/legal-judgements/en/pytorch_model_head.bin b/loras/legal-judgements/en/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..eb2b2210c822af24f26db83ae888cf39a21167a8 --- /dev/null +++ b/loras/legal-judgements/en/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d6730e05c4168b95caa3e63a836b7e8ec2f13420057de84bf0ea392e20887b48 +size 342547 diff --git a/loras/legal-judgements/es/adapter_config.json b/loras/legal-judgements/es/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/legal-judgements/es/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements/es/head_config.json b/loras/legal-judgements/es/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/legal-judgements/es/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 
80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements/es/pytorch_adapter.bin b/loras/legal-judgements/es/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..2c9687eac3bdca536e5789a5d0b0c9681f7752c4 --- /dev/null +++ b/loras/legal-judgements/es/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:afbd080ad6d164539a50f9f07ba5ef668ffcf940224a611d0ffc917e2361b0c6 +size 5333085 diff --git a/loras/legal-judgements/es/pytorch_model_head.bin b/loras/legal-judgements/es/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..7922df4d643f2cfd3027ec7b1b55bb72e44d43d6 --- /dev/null +++ b/loras/legal-judgements/es/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5930134c69765f36a88ba4d859e773bb669f4cda503807062e62fb7b3456b2be +size 342547 diff --git a/loras/legal-judgements/fr/adapter_config.json b/loras/legal-judgements/fr/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/legal-judgements/fr/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements/fr/head_config.json b/loras/legal-judgements/fr/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/legal-judgements/fr/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + 
"LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements/fr/pytorch_adapter.bin b/loras/legal-judgements/fr/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..ccc11ce2034af63cacd42f7a131e8db18b2ead92 --- /dev/null +++ b/loras/legal-judgements/fr/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f880b159b29b91fa492434159e258800f0a8bb66480d0c8ddac1737ca40b7377 +size 5333085 diff --git a/loras/legal-judgements/fr/pytorch_model_head.bin b/loras/legal-judgements/fr/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..2435c69e8a260484027cb625eb807d31dbc91c8f --- /dev/null +++ b/loras/legal-judgements/fr/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:af330942e95a79c8644e7a5cfc75d2d10a43b26e9b505d9132f5f2b673fdf1dc +size 342547 diff --git a/loras/legal-judgements/it/adapter_config.json b/loras/legal-judgements/it/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/legal-judgements/it/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements/it/head_config.json b/loras/legal-judgements/it/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/legal-judgements/it/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + 
"LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements/it/pytorch_adapter.bin b/loras/legal-judgements/it/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..d70a765766980dc86fdb2712b76aa6d5a9510bdc --- /dev/null +++ b/loras/legal-judgements/it/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0da4a308a1d2e49f7a5017dff1f0761edf0dce0b705e1789fbf34b588c3fa720 +size 5333085 diff --git a/loras/legal-judgements/it/pytorch_model_head.bin b/loras/legal-judgements/it/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..443f2f2c00309e8f1538481025fa4cd7f7abf38b --- /dev/null +++ b/loras/legal-judgements/it/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:46e631dd2691c0cd795a08565414282afa7be6dd438bbbb7f832f654d0332038 +size 342547 diff --git a/loras/legal-judgements/pt/adapter_config.json b/loras/legal-judgements/pt/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/legal-judgements/pt/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": 
"SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements/pt/head_config.json b/loras/legal-judgements/pt/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/legal-judgements/pt/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-judgements/pt/pytorch_adapter.bin b/loras/legal-judgements/pt/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..3cb0a1292adb6e1c39a69211443cb21fe1217727 --- /dev/null +++ b/loras/legal-judgements/pt/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d217b5a8397b2ecc23876a9933bb7265f7eb4cc024ceed4804afc9fee2d2f124 +size 5333085 diff --git a/loras/legal-judgements/pt/pytorch_model_head.bin b/loras/legal-judgements/pt/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..576a2a701c77b89a20748d4dfeb5c5498572fb9f --- /dev/null +++ b/loras/legal-judgements/pt/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:198696ede92aec9b0aba07892e69d3be3590eb0ffdc016ea1e4cecc5dcc92d47 +size 342547 diff --git a/loras/legal-laws-corrupted/de/adapter_config.json b/loras/legal-laws-corrupted/de/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/legal-laws-corrupted/de/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-laws-corrupted/de/head_config.json b/loras/legal-laws-corrupted/de/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/legal-laws-corrupted/de/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git 
a/loras/legal-laws-corrupted/de/pytorch_adapter.bin b/loras/legal-laws-corrupted/de/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..adc3d55610fa7fa9db4727755c864fe3f125eee0 --- /dev/null +++ b/loras/legal-laws-corrupted/de/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ff682bfc27ae6a6e67207228f03c9f0dfe7262ef6ee560bc49e26fe005d7324b +size 5333085 diff --git a/loras/legal-laws-corrupted/de/pytorch_model_head.bin b/loras/legal-laws-corrupted/de/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..18ee32e19206bafeb53a03aae02fe9e1055ddffb --- /dev/null +++ b/loras/legal-laws-corrupted/de/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6be222be77e2c000d9c5f8fb8be30231628f56268990c5273fea2dfe86c9d348 +size 342547 diff --git a/loras/legal-laws-corrupted/es/adapter_config.json b/loras/legal-laws-corrupted/es/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/legal-laws-corrupted/es/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-laws-corrupted/es/head_config.json b/loras/legal-laws-corrupted/es/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/legal-laws-corrupted/es/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + 
"LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-laws-corrupted/es/pytorch_adapter.bin b/loras/legal-laws-corrupted/es/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..4dc4e8812769763547b04667c390f712c8c7dc64 --- /dev/null +++ b/loras/legal-laws-corrupted/es/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:21e25e6228f21652ea658d962f9f44a18efa756dff783658771b502186c6eed7 +size 5333085 diff --git a/loras/legal-laws-corrupted/es/pytorch_model_head.bin b/loras/legal-laws-corrupted/es/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..048514db90c15b4c1fd7822b486db847b8a5f0a1 --- /dev/null +++ b/loras/legal-laws-corrupted/es/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f04f29bb602a7b67468ad5f94d7dcd949e8c2c4e655aba474ed2a17b67a00de3 +size 342547 diff --git a/loras/legal-laws-corrupted/fr/adapter_config.json b/loras/legal-laws-corrupted/fr/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/legal-laws-corrupted/fr/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-laws-corrupted/fr/head_config.json b/loras/legal-laws-corrupted/fr/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/legal-laws-corrupted/fr/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + 
"LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-laws-corrupted/fr/pytorch_adapter.bin b/loras/legal-laws-corrupted/fr/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..503c551be919652639ca5bd74d5b311c1a18dcc2 --- /dev/null +++ b/loras/legal-laws-corrupted/fr/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1b9cb386482d226e08a9479e5458565984365b0f8cb470fe94d51c05c6d7c492 +size 5333085 diff --git a/loras/legal-laws-corrupted/fr/pytorch_model_head.bin b/loras/legal-laws-corrupted/fr/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..51dd7ab2ecabbbb2a4ec9fe7b54ce21c18eaa905 --- /dev/null +++ b/loras/legal-laws-corrupted/fr/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:643e9c2ee003fc8976f2244b622c34617ae89aa413a4f1ac701db4a209e8a8d1 +size 342547 diff --git a/loras/legal-laws-corrupted/it/adapter_config.json b/loras/legal-laws-corrupted/it/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/legal-laws-corrupted/it/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-laws-corrupted/it/head_config.json b/loras/legal-laws-corrupted/it/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/legal-laws-corrupted/it/head_config.json @@ -0,0 +1,123 
@@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-laws-corrupted/it/pytorch_adapter.bin b/loras/legal-laws-corrupted/it/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..a631f53b7585dd179ce1c32185ac295ec9870128 --- /dev/null +++ b/loras/legal-laws-corrupted/it/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:738642c0e469409bc79946820ab7defafdf885a13596d4b5a5a4598b2bf73692 +size 5333085 diff --git a/loras/legal-laws-corrupted/it/pytorch_model_head.bin b/loras/legal-laws-corrupted/it/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..6a2e22fcbd82b276063022cfe76a9be51a17f10a --- /dev/null +++ b/loras/legal-laws-corrupted/it/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0e59be425afcae0eb11d4977414e13b2f9cc530693821cf5dcf2d290d1911d35 +size 342547 diff --git a/loras/legal-laws-corrupted/pt/adapter_config.json b/loras/legal-laws-corrupted/pt/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/legal-laws-corrupted/pt/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + 
"attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-laws-corrupted/pt/head_config.json b/loras/legal-laws-corrupted/pt/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/legal-laws-corrupted/pt/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-laws-corrupted/pt/pytorch_adapter.bin b/loras/legal-laws-corrupted/pt/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..d2e1e79588686353772bfeca14837e94be2672fa --- /dev/null +++ b/loras/legal-laws-corrupted/pt/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:701110e14f8aff01c5c44c2017edc5fd6cc340daed348cc69289ec49cd384824 +size 5333085 diff --git 
a/loras/legal-laws-corrupted/pt/pytorch_model_head.bin b/loras/legal-laws-corrupted/pt/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..87f009aedbebe5436e974407469ce6a20aefb7cf --- /dev/null +++ b/loras/legal-laws-corrupted/pt/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b88bf90543795acd4b03ed3367714e994d42dc90a8916ed9ab7981e4a2866887 +size 342547 diff --git a/loras/legal-laws/de/adapter_config.json b/loras/legal-laws/de/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/legal-laws/de/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-laws/de/head_config.json b/loras/legal-laws/de/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/legal-laws/de/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + 
"LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-laws/de/pytorch_adapter.bin b/loras/legal-laws/de/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..d6ba2a0bd8d7337920d35a5681bfe6f4b60debc2 --- /dev/null +++ b/loras/legal-laws/de/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2176b6b130be15cbd1f0eec10fe6f1cd334dd6c6a2d8c88d582563aba3b4814a +size 5333085 diff --git a/loras/legal-laws/de/pytorch_model_head.bin b/loras/legal-laws/de/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..8c47e10628a332c09e2b5d0a88e9c0951478eb70 --- /dev/null +++ b/loras/legal-laws/de/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:001805bafbde9ddce3308c919c95cc9d74bbaa660ecdf6ea1899c0dcd951ba9d +size 342547 diff --git a/loras/legal-laws/es/adapter_config.json b/loras/legal-laws/es/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/legal-laws/es/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-laws/es/head_config.json b/loras/legal-laws/es/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/legal-laws/es/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + 
"LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-laws/es/pytorch_adapter.bin b/loras/legal-laws/es/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..7a732feba3d574085c28fa2a92af3bc0b5f660a4 --- /dev/null +++ b/loras/legal-laws/es/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d82c45c2e33098d6286fbf405ce146d2e7b3e2b3a03226c03e650ab4af3f14a4 +size 5333085 diff --git a/loras/legal-laws/es/pytorch_model_head.bin b/loras/legal-laws/es/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..a729c8cf941839c98703bc18a9f445904db062d1 --- /dev/null +++ b/loras/legal-laws/es/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1b0037a9e516c8ca84ba94f01aa50766d0dffd09067d46b6ff2993e4b36b5b9c +size 342547 diff --git a/loras/legal-laws/fr/adapter_config.json b/loras/legal-laws/fr/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/legal-laws/fr/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-laws/fr/head_config.json b/loras/legal-laws/fr/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/legal-laws/fr/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + 
"LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-laws/fr/pytorch_adapter.bin b/loras/legal-laws/fr/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..645bd299acf39a6f5482efbf8ec10b32c6da167d --- /dev/null +++ b/loras/legal-laws/fr/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:656bfb86e940d85a4cb70276f28674269594d78534de916f2491b30c46b5e5ae +size 5333085 diff --git a/loras/legal-laws/fr/pytorch_model_head.bin b/loras/legal-laws/fr/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..3d5f931fa7fbdc9dfc7b958b60c9ab011c88c6fa --- /dev/null +++ b/loras/legal-laws/fr/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:daf8f81327a900f82bc7fa5aaee1c1ba5526e9958b5eadb61809f9b25249a592 +size 342547 diff --git a/loras/legal-laws/it/adapter_config.json b/loras/legal-laws/it/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/legal-laws/it/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-laws/it/head_config.json b/loras/legal-laws/it/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/legal-laws/it/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + 
"LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-laws/it/pytorch_adapter.bin b/loras/legal-laws/it/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..72280d2463f76e693aa3e09c48c0ef8f2b344823 --- /dev/null +++ b/loras/legal-laws/it/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c132bc745db44afd512608d21c0a1cdb3d5a6b9f5ac9c779b30c2466e369724a +size 5333085 diff --git a/loras/legal-laws/it/pytorch_model_head.bin b/loras/legal-laws/it/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..2902c2132eb4bece484d5906e11e81a7b733cfbd --- /dev/null +++ b/loras/legal-laws/it/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:952bab911bc9702ad23c602e06a4bdd3a6b8b9b5c8c430340f0c3e4ef146526c +size 342547 diff --git a/loras/legal-laws/pt/adapter_config.json b/loras/legal-laws/pt/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/legal-laws/pt/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + 
"r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-laws/pt/head_config.json b/loras/legal-laws/pt/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/legal-laws/pt/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/legal-laws/pt/pytorch_adapter.bin b/loras/legal-laws/pt/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..e6f23c79684e1e608e180a4c8e42a5b2ff8b5d8c --- /dev/null +++ b/loras/legal-laws/pt/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b3d3eb6011b724113b7730c619b8803fc4843719817719c8ce63a1feaae3a985 +size 5333085 diff --git a/loras/legal-laws/pt/pytorch_model_head.bin b/loras/legal-laws/pt/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..300b762e509c3933f5bbf127256455f1cb947fa0 --- /dev/null +++ b/loras/legal-laws/pt/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:03245a04681ee07aa71484b5fb7a9163370db24fab9fd8c2257b87e1134374c4 +size 342547 diff --git a/loras/opus100/af/adapter_config.json b/loras/opus100/af/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/af/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/af/head_config.json b/loras/opus100/af/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/af/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/af/pytorch_adapter.bin b/loras/opus100/af/pytorch_adapter.bin 
new file mode 100644 index 0000000000000000000000000000000000000000..e1d5d1da99aee7b73665ea3d8dcd07b17b608320 --- /dev/null +++ b/loras/opus100/af/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a7b02dbc84c3f6c97f2d56fefc2de4d844f953ba8e987fcb2d0ee592cbd314d4 +size 5333085 diff --git a/loras/opus100/af/pytorch_model_head.bin b/loras/opus100/af/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..f48f8c58f34cc6443e62b4fd4cf28ccbbacfc2e2 --- /dev/null +++ b/loras/opus100/af/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6733cc32ddcd588fe3a6186c37a3953b41c32739738a2bf9411ea887bb54f7ea +size 342547 diff --git a/loras/opus100/am/adapter_config.json b/loras/opus100/am/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/am/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/am/head_config.json b/loras/opus100/am/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/am/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + 
"LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/am/pytorch_adapter.bin b/loras/opus100/am/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..f4dcf6f2963600fae194e0bab71ed9354465c972 --- /dev/null +++ b/loras/opus100/am/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:817d46c231b20e1c5198f6cf2a40c0684a95443d46087c8d7893697a42cef0ec +size 5333085 diff --git a/loras/opus100/am/pytorch_model_head.bin b/loras/opus100/am/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..c2585d2077feb12698de40f71cbe1ca4b6438284 --- /dev/null +++ b/loras/opus100/am/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:94c37eadaf36a6a58038af9ed42a98721ec3f7a720ca645c3d4117a40cf5f013 +size 342547 diff --git a/loras/opus100/ar/adapter_config.json b/loras/opus100/ar/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/ar/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ar/head_config.json b/loras/opus100/ar/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/ar/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + 
"LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ar/pytorch_adapter.bin b/loras/opus100/ar/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..99f72cbe3f77c9d4902576fe3cc36cbb92d3483b --- /dev/null +++ b/loras/opus100/ar/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c64df93335a48edc9ed25603ba68f80f1602bb72bd4d4042d37ede001045f221 +size 5333085 diff --git a/loras/opus100/ar/pytorch_model_head.bin b/loras/opus100/ar/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..a00e7b7673d74437519a0fda76df467f22a7c57e --- /dev/null +++ b/loras/opus100/ar/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d3babce60e82e3a3c8197ce1d6788c5b5b05548966b980058da046d51d0687a5 +size 342547 diff --git a/loras/opus100/az/adapter_config.json b/loras/opus100/az/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/az/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/az/head_config.json b/loras/opus100/az/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/az/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + 
"LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/az/pytorch_adapter.bin b/loras/opus100/az/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..d211755f007e0b0e404c84cb9201dfb4d0008ed0 --- /dev/null +++ b/loras/opus100/az/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:16cedf45b110b63a1b2167ef698ee8ca07f15ad6eff124b8e626206e39a5d31b +size 5333085 diff --git a/loras/opus100/az/pytorch_model_head.bin b/loras/opus100/az/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..ca3e14c42dbff1112359f9a77a579231aa22a19c --- /dev/null +++ b/loras/opus100/az/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b94fe6dc223d72101e7b1f19d19e33abcc5ae5fa0071e392bd393edfa7245b86 +size 342547 diff --git a/loras/opus100/be/adapter_config.json b/loras/opus100/be/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/be/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/be/head_config.json b/loras/opus100/be/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- 
/dev/null +++ b/loras/opus100/be/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/be/pytorch_adapter.bin b/loras/opus100/be/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..fca9205917ac28c74dfba6f68a26255670dd8cf5 --- /dev/null +++ b/loras/opus100/be/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ad0858f11926f131aeeaffbc3c285266d03ac622b64d513d19161fda42231d49 +size 5333085 diff --git a/loras/opus100/be/pytorch_model_head.bin b/loras/opus100/be/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..3bf7e3e7cb8bd9a247f85d8d5dcc8dc04edaa0ea --- /dev/null +++ b/loras/opus100/be/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a0e42fb584352eda662f2e966186d75b7564fa9613a5f666e4e7d6c9570ae813 +size 342547 diff --git a/loras/opus100/bg/adapter_config.json b/loras/opus100/bg/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/bg/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + 
"composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/bg/head_config.json b/loras/opus100/bg/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/bg/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/bg/pytorch_adapter.bin b/loras/opus100/bg/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..090bb7ce50652a722cff278fe1679110d3b29497 --- /dev/null +++ b/loras/opus100/bg/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1cccf91168d54be175dba54103f870348dafacb947f22a19f1d79bcb0e765caa +size 5333085 diff --git a/loras/opus100/bg/pytorch_model_head.bin b/loras/opus100/bg/pytorch_model_head.bin new file mode 100644 index 
0000000000000000000000000000000000000000..7e111f9b0e79d0fd5f2d83237ca7067553e7800a --- /dev/null +++ b/loras/opus100/bg/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d8998c0662f03179f72908bc67f8546da44d973f61a5ca1d65cbb1e30af0f9b3 +size 342547 diff --git a/loras/opus100/bn/adapter_config.json b/loras/opus100/bn/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/bn/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/bn/head_config.json b/loras/opus100/bn/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/bn/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": 
null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/bn/pytorch_adapter.bin b/loras/opus100/bn/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..ed04dedffad3a93200c2581fb7f9e4bbacfab51f --- /dev/null +++ b/loras/opus100/bn/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b164da83b416412c4d9a9c1c70f6df2eab5df3df485cc6791b45ed3be4d079cd +size 5333085 diff --git a/loras/opus100/bn/pytorch_model_head.bin b/loras/opus100/bn/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..c8ac2db31091fbb5ec241beca4484955b96cc37e --- /dev/null +++ b/loras/opus100/bn/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:08837460378baee10084e03ec05f6f8b524c797a3c1ca7d00f6c06b5f4b04f54 +size 342547 diff --git a/loras/opus100/ca/adapter_config.json b/loras/opus100/ca/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/ca/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ca/head_config.json b/loras/opus100/ca/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/ca/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 
75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ca/pytorch_adapter.bin b/loras/opus100/ca/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..7d09fc82e835ed68061f81de310ae5981848dcf0 --- /dev/null +++ b/loras/opus100/ca/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bd33b097f222234c03a2754f62bc89fcebf732881f17dfa960044fcb637a0c24 +size 5333085 diff --git a/loras/opus100/ca/pytorch_model_head.bin b/loras/opus100/ca/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..0b3b4ade679bdfa881c546454ac047d3cd105f89 --- /dev/null +++ b/loras/opus100/ca/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0914753339d261f5e1a42324ddddf5fd0dd2ca0a70047f67a3d69c8a33fcc80a +size 342547 diff --git a/loras/opus100/cs/adapter_config.json b/loras/opus100/cs/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/cs/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/cs/head_config.json b/loras/opus100/cs/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/cs/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 
44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/cs/pytorch_adapter.bin b/loras/opus100/cs/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..f5310a3da3df06445dd5efd0773cdd7f05921a25 --- /dev/null +++ b/loras/opus100/cs/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:db980d78deaf0353663cf45b3da67f2d4fae246893f2bc754071c3c892bf158f +size 5333085 diff --git a/loras/opus100/cs/pytorch_model_head.bin b/loras/opus100/cs/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..cdc5ee6501fa2511b8104246bb4ebbb9ce9aedd1 --- /dev/null +++ b/loras/opus100/cs/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:371cc006102a4df7adc44f543a328a3f5952eedab279de4250b8a3ddb4d4440a +size 342547 diff --git a/loras/opus100/cy/adapter_config.json b/loras/opus100/cy/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/cy/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/cy/head_config.json b/loras/opus100/cy/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/cy/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 
13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/cy/pytorch_adapter.bin b/loras/opus100/cy/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..4760d5b33417bdeaa830ea28f7e978a42702d524 --- /dev/null +++ b/loras/opus100/cy/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:525f791d34a6ec642517314bde673bf173db0ac5e5f56c36b819cb2d23887cb6 +size 5333085 diff --git a/loras/opus100/cy/pytorch_model_head.bin b/loras/opus100/cy/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..07cb3fd1000bc87eacae57d3006876cd305d7fb2 --- /dev/null +++ b/loras/opus100/cy/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3838e1a5f97c3cf02965a97e6e9d3e350c52f06396db9ae135c647193e521807 +size 342547 diff --git a/loras/opus100/da/adapter_config.json b/loras/opus100/da/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/da/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git 
a/loras/opus100/da/head_config.json b/loras/opus100/da/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/da/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/da/pytorch_adapter.bin b/loras/opus100/da/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..337f225d7801fcaaace8c3db071c4b63f4396f97 --- /dev/null +++ b/loras/opus100/da/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f67325c1f8ee01487910ffa984c08d514a747b8000a74983a09a665b64ca0b1d +size 5333085 diff --git a/loras/opus100/da/pytorch_model_head.bin b/loras/opus100/da/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..1339c56dba66476b41fa38cae4f8153a998b239d --- /dev/null +++ b/loras/opus100/da/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8283808cec06db06673111ce29ed7d72cb0d7e9c575342609d5dd7dc7d6ff253 +size 342547 diff --git a/loras/opus100/de/adapter_config.json b/loras/opus100/de/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 
--- /dev/null +++ b/loras/opus100/de/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/de/head_config.json b/loras/opus100/de/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/de/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/de/pytorch_adapter.bin b/loras/opus100/de/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..2c4d481eb6688a93d8896d866237939fa095df7c --- /dev/null +++ b/loras/opus100/de/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8d483a14e47cc1eb1e6db860621ba3de38ac0a81c1ce03770084d92874a93c0e +size 5333085 diff 
--git a/loras/opus100/de/pytorch_model_head.bin b/loras/opus100/de/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..81e0caf6f68ecec39a6c40c78e0fa0453b2d4ec9 --- /dev/null +++ b/loras/opus100/de/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f855e505b3532635bc96fe04290f35f2c7e9be94cab8f476f90b7011a9faa03e +size 342547 diff --git a/loras/opus100/el/adapter_config.json b/loras/opus100/el/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/el/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/el/head_config.json b/loras/opus100/el/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/el/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": 
"SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/el/pytorch_adapter.bin b/loras/opus100/el/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..b0b17084dd2f9a7f4ef42af3d8a5e19a2ad67e08 --- /dev/null +++ b/loras/opus100/el/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c56ad57deac1832b0c4fe2eed2f5f833a1b91e19583b392c231564271044843a +size 5333085 diff --git a/loras/opus100/el/pytorch_model_head.bin b/loras/opus100/el/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..0c1c9da993592d4d5b56a900825e3548dc672bc4 --- /dev/null +++ b/loras/opus100/el/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:023c5c67a15c438bbebbed892b5224f13aac40ffd8bec78c0822c6109c32efb1 +size 342547 diff --git a/loras/opus100/en/adapter_config.json b/loras/opus100/en/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/en/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/en/head_config.json b/loras/opus100/en/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/en/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + 
"LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/en/pytorch_adapter.bin b/loras/opus100/en/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..85a2e1a93228817d6e5e28f6fc0fda846c4f3b31 --- /dev/null +++ b/loras/opus100/en/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a9bc2fb753f234f940b96d398c43e2322227c1608756a078c2ebef702c27a8b3 +size 5333085 diff --git a/loras/opus100/en/pytorch_model_head.bin b/loras/opus100/en/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..9c6b41f3f3268a02ca6f4a5421291d88eaf6d926 --- /dev/null +++ b/loras/opus100/en/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:579eda913f74c43fac2ae16e2de00511dc47006d12377f1f1368d8d02bdddced +size 342547 diff --git a/loras/opus100/eo/adapter_config.json b/loras/opus100/eo/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/eo/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/eo/head_config.json b/loras/opus100/eo/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/eo/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + 
"LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/eo/pytorch_adapter.bin b/loras/opus100/eo/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..5352d2ade04fe5119a84d01d4eed0ede315ef834 --- /dev/null +++ b/loras/opus100/eo/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bfba0681f8fa7165f66d407393446a483b1adc42b9d67420a504357693d3c62a +size 5333085 diff --git a/loras/opus100/eo/pytorch_model_head.bin b/loras/opus100/eo/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..334f15b4e3fad76e4c421ac0c78e16aeac16ce13 --- /dev/null +++ b/loras/opus100/eo/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f00174fed60ef9cd3fa1805ea53183774d13da18777b4dbde6b33a66a88646a4 +size 342547 diff --git a/loras/opus100/es/adapter_config.json b/loras/opus100/es/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/es/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/es/head_config.json b/loras/opus100/es/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/es/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + 
"LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/es/pytorch_adapter.bin b/loras/opus100/es/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..234a3617dc695e9704fd6b6cee6c016e04f051a0 --- /dev/null +++ b/loras/opus100/es/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:810ba5f92ce9e7ae3205e90df5020b9964bf1e21f4509842254fd70a39d8c2f0 +size 5333085 diff --git a/loras/opus100/es/pytorch_model_head.bin b/loras/opus100/es/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..87942234befa36f9dac9fb455de7b8f9603324cd --- /dev/null +++ b/loras/opus100/es/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3a411fca035a5adc85bdb80ae8322bba026d456a2020bf3fe1e5be3deedc9011 +size 342547 diff --git a/loras/opus100/et/adapter_config.json b/loras/opus100/et/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/et/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + 
"model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/et/head_config.json b/loras/opus100/et/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/et/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/et/pytorch_adapter.bin b/loras/opus100/et/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..989414f391d85efbcb0667c4397cf804113b9921 --- /dev/null +++ b/loras/opus100/et/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:29027cd94ee16e1d4f5954eea0e39226e0e2198523d3542a5d57425998c5436b +size 5333085 diff --git a/loras/opus100/et/pytorch_model_head.bin b/loras/opus100/et/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..33ee1e8766e722b7cdaf6af6768b1e82947eb474 --- /dev/null +++ b/loras/opus100/et/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:31aa65f058c8b44e0ad1b1527fffa36028a2ad8f25a521d5cb06656daaad0e13 +size 342547 diff --git a/loras/opus100/eu/adapter_config.json b/loras/opus100/eu/adapter_config.json new 
file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/eu/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/eu/head_config.json b/loras/opus100/eu/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/eu/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/eu/pytorch_adapter.bin b/loras/opus100/eu/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..c2eadf572b6a29acb5583d4e78fd3ea8e497e40c --- /dev/null +++ b/loras/opus100/eu/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:8dcb355ae5a99df14afdf8f0557510068af3ada7ee218656b6f14d3591311d6f +size 5333085 diff --git a/loras/opus100/eu/pytorch_model_head.bin b/loras/opus100/eu/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..7e149ba19583e223c6fc0c67381b0be680b86692 --- /dev/null +++ b/loras/opus100/eu/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:048213e93f905986ab6f4d921cd77edfb8a6e7e72155d4b0f330169c4af42a67 +size 342547 diff --git a/loras/opus100/fa/adapter_config.json b/loras/opus100/fa/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/fa/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/fa/head_config.json b/loras/opus100/fa/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/fa/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + 
"LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/fa/pytorch_adapter.bin b/loras/opus100/fa/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..51e09d1c41d4e7155183046ebeb1ce827b75df80 --- /dev/null +++ b/loras/opus100/fa/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ac14743e25acc84a34a7bef4a4e98836e0ec605d482ee50da8fa7e540a81039a +size 5333085 diff --git a/loras/opus100/fa/pytorch_model_head.bin b/loras/opus100/fa/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..f2cf57e71af55f28e702ee5fa4b87f370019e4dc --- /dev/null +++ b/loras/opus100/fa/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a5416c52ec8ac6bdcc6e804c8e9cbe47d581a8107c71c26794c44ae212456e8d +size 342547 diff --git a/loras/opus100/fi/adapter_config.json b/loras/opus100/fi/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/fi/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/fi/head_config.json b/loras/opus100/fi/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/fi/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + 
"LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/fi/pytorch_adapter.bin b/loras/opus100/fi/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..6c0df06a1f27580fe6346df090493c4d35be1d6c --- /dev/null +++ b/loras/opus100/fi/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2c168c7e1d36ed52e672a5bb5f7de653973e679ca9635c5a4662512d803b591a +size 5333085 diff --git a/loras/opus100/fi/pytorch_model_head.bin b/loras/opus100/fi/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..d825ecb441878de282d723016a55fac3f6703d73 --- /dev/null +++ b/loras/opus100/fi/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:56f640f3bf1ac556f96a06da7b042f91785bf92e2e7496282b39c980eeeadf86 +size 342547 diff --git a/loras/opus100/fy/adapter_config.json b/loras/opus100/fy/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/fy/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/fy/head_config.json b/loras/opus100/fy/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/fy/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + 
"LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/fy/pytorch_adapter.bin b/loras/opus100/fy/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..324c7e368661f8310c386c8ff2d067807fdb5836 --- /dev/null +++ b/loras/opus100/fy/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0edf937734130b9da51a8d2f2655753c462c08308bc7c98605b62ef748ddfcc8 +size 5333085 diff --git a/loras/opus100/fy/pytorch_model_head.bin b/loras/opus100/fy/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..470b05c3139bd1790d89d62e08fa35e1c43f750a --- /dev/null +++ b/loras/opus100/fy/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c0bf4ddda89d18a49d40aaf56eebe028f765923b9da60d82ecd3c89a24629483 +size 342547 diff --git a/loras/opus100/ga/adapter_config.json b/loras/opus100/ga/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/ga/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ga/head_config.json b/loras/opus100/ga/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/ga/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + 
"LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ga/pytorch_adapter.bin b/loras/opus100/ga/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..4defa8957400143e0be5bd03da2ecc06f7e72b70 --- /dev/null +++ b/loras/opus100/ga/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:15234c27474c6215004284fb7ca90faf0a3abf6a945add56d5350c6baa262fdf +size 5333085 diff --git a/loras/opus100/ga/pytorch_model_head.bin b/loras/opus100/ga/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..ba29964f7847ba0e4fc2b073b71c1a02c2440727 --- /dev/null +++ b/loras/opus100/ga/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:44f6a07141d80cb8a7420b1899b2f14bae8947614abf6312133a4c3ff5443009 +size 342547 diff --git a/loras/opus100/gd/adapter_config.json b/loras/opus100/gd/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/gd/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + 
"hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/gd/head_config.json b/loras/opus100/gd/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/gd/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/gd/pytorch_adapter.bin b/loras/opus100/gd/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..eec882eda4259582fa9de58385e49653bbd4d100 --- /dev/null +++ b/loras/opus100/gd/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4ca39a52f89d02d7a1ec66b7d25d47add22dfad1b7a7a489468cf91733b3c4f4 +size 5333085 diff --git a/loras/opus100/gd/pytorch_model_head.bin b/loras/opus100/gd/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..3e8121a8a278f123badb892bfc288eae007de9ee --- /dev/null +++ b/loras/opus100/gd/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:875e72bfbb96e8bca18d35c5c8cd330ef598f7b4953b2b46063e1ef4858675b2 +size 342547 diff --git a/loras/opus100/gl/adapter_config.json b/loras/opus100/gl/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/gl/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/gl/head_config.json b/loras/opus100/gl/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/gl/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/gl/pytorch_adapter.bin b/loras/opus100/gl/pytorch_adapter.bin new file mode 100644 index 
0000000000000000000000000000000000000000..a81b174dfdf336fda175a36529ddba0c76a3ba17 --- /dev/null +++ b/loras/opus100/gl/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a274e81b469cdfd4a58623419d31b3e7983a839e397c7480fd8d721c15f04463 +size 5333085 diff --git a/loras/opus100/gl/pytorch_model_head.bin b/loras/opus100/gl/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..2f537c7ac89273949aebf154146aff48947fba1a --- /dev/null +++ b/loras/opus100/gl/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b5f00fb8463bb74ec60d69ca43a9d90848e5c845d4c14b1aa456d6ae553e36ef +size 342547 diff --git a/loras/opus100/gu/adapter_config.json b/loras/opus100/gu/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/gu/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/gu/head_config.json b/loras/opus100/gu/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/gu/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 
85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/gu/pytorch_adapter.bin b/loras/opus100/gu/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..21b1437b2ec23bd4e65442b6bc68d5634acff31b --- /dev/null +++ b/loras/opus100/gu/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:707835ae4018d830f815b660d84c7ded4e8d281644c34f59eb6563e5869cfa07 +size 5333085 diff --git a/loras/opus100/gu/pytorch_model_head.bin b/loras/opus100/gu/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..6f2b014038f6fca6bfb77108957a6f165d09567a --- /dev/null +++ b/loras/opus100/gu/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9fac6bb324d8b80bd8c6581d3e1feedf014df1363f5e11487e2bba94b771a22b +size 342547 diff --git a/loras/opus100/ha/adapter_config.json b/loras/opus100/ha/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/ha/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ha/head_config.json b/loras/opus100/ha/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/ha/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 
54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ha/pytorch_adapter.bin b/loras/opus100/ha/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..789c7791e2242ad4fc7046ab2ff71dfe454716a7 --- /dev/null +++ b/loras/opus100/ha/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3fd195cfc94a00efc2a73b5c2cdf0ed54b5e35451ad9078f2ba5d5d22a050b10 +size 5333085 diff --git a/loras/opus100/ha/pytorch_model_head.bin b/loras/opus100/ha/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..e24375e430325290b915faaa5d35c834a6ba4ae1 --- /dev/null +++ b/loras/opus100/ha/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f2575c8ab8cf6b4eb9aeb622713e508e81d98410ddb1c47df71ca3bbde434590 +size 342547 diff --git a/loras/opus100/he/adapter_config.json b/loras/opus100/he/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/he/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/he/head_config.json b/loras/opus100/he/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/he/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 
23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/he/pytorch_adapter.bin b/loras/opus100/he/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..f1f929420733f340d3327e1973e4d1ac5f8f7855 --- /dev/null +++ b/loras/opus100/he/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b2330deff302fb431b0ef8218383727230d36ce67fe945192b156b82e6621806 +size 5333085 diff --git a/loras/opus100/he/pytorch_model_head.bin b/loras/opus100/he/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..78d3bc66410ae6f00cb8754ba52530589669c324 --- /dev/null +++ b/loras/opus100/he/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0ac78735e79c1ce70fd7bec4eed67ff95ed14ca70b918123f325d927dfd8a8cf +size 342547 diff --git a/loras/opus100/hi/adapter_config.json b/loras/opus100/hi/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/hi/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/hi/head_config.json b/loras/opus100/hi/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ 
b/loras/opus100/hi/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/hi/pytorch_adapter.bin b/loras/opus100/hi/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..0c52c74a76e5a312b67cc850684db0378f26e719 --- /dev/null +++ b/loras/opus100/hi/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1ed9d4d15dfbbca7f98efea13a24518ca71940ef9530736dc1820f279ff75687 +size 5333085 diff --git a/loras/opus100/hi/pytorch_model_head.bin b/loras/opus100/hi/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..fea4ab3425e952f2bee9d52d6269bce55769b278 --- /dev/null +++ b/loras/opus100/hi/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8772146192a0746f65b2134eab6e43db6f3c35a9451838a626b86df483d82b85 +size 342547 diff --git a/loras/opus100/hu/adapter_config.json b/loras/opus100/hu/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/hu/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + 
"dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/hu/head_config.json b/loras/opus100/hu/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/hu/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/hu/pytorch_adapter.bin b/loras/opus100/hu/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..ae149ff58d79fca0dc3e545385cbc5be30bbdf23 --- /dev/null +++ b/loras/opus100/hu/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:db9d11c47a42f9ba3054b314a7d1cf07c59e77167883793866fa72d1967eebd0 +size 5333085 diff --git a/loras/opus100/hu/pytorch_model_head.bin b/loras/opus100/hu/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..c034aeefb2bea0e7e7563a28ed7f0579993ddb43 --- 
/dev/null +++ b/loras/opus100/hu/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c8fc4e2df814f5ccc54d3b2145369bd58e48ad826fd014802fbd5150ded6600f +size 342547 diff --git a/loras/opus100/id/adapter_config.json b/loras/opus100/id/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/id/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/id/head_config.json b/loras/opus100/id/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/id/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff 
--git a/loras/opus100/id/pytorch_adapter.bin b/loras/opus100/id/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..b066ac4ba367c16b9d580b880e4082108ffe02e3 --- /dev/null +++ b/loras/opus100/id/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8a22b4f8d832f3980b8e016714df41efa219bec6a3c9186223d2d146be0b292a +size 5333085 diff --git a/loras/opus100/id/pytorch_model_head.bin b/loras/opus100/id/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..177e1e274c4467a344ec32bc928485abda4a9fbe --- /dev/null +++ b/loras/opus100/id/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1d6555c19ee3789d80cc36a0b95f087e24e0762a354c00e76cb81e27c00103fa +size 342547 diff --git a/loras/opus100/ig/adapter_config.json b/loras/opus100/ig/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/ig/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ig/head_config.json b/loras/opus100/ig/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/ig/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + 
"LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ig/pytorch_adapter.bin b/loras/opus100/ig/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..49bd536634c13590edea7f20880d4b9517e7a20b --- /dev/null +++ b/loras/opus100/ig/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0169cc0e5f390ab8ba15dab0bd3674fd782412943826a11a0a16cd9b1208f1c2 +size 5333085 diff --git a/loras/opus100/ig/pytorch_model_head.bin b/loras/opus100/ig/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..ae8e9be2086e0a32ac0e4cab3964e69a3263e24c --- /dev/null +++ b/loras/opus100/ig/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8bfda8491be4246880aa92cf7eaf6be5f2773f0656f25d14259860f887ff3b85 +size 342547 diff --git a/loras/opus100/is/adapter_config.json b/loras/opus100/is/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/is/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/is/head_config.json b/loras/opus100/is/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/is/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + 
"LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/is/pytorch_adapter.bin b/loras/opus100/is/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..a0e7ccdbd80b2bf067e52957fb0ac45839c84691 --- /dev/null +++ b/loras/opus100/is/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:431cb9eadec2704d4857dd62c49da34a49d467bfaf91000196fd2b1c1c062300 +size 5333085 diff --git a/loras/opus100/is/pytorch_model_head.bin b/loras/opus100/is/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..876f3c47ba0afe5fd04cb74eebc1851d6125ae46 --- /dev/null +++ b/loras/opus100/is/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:67ee044475d389e16e208b090424520ef7cdcb45b2115ea93f8d2aefcfc1d538 +size 342547 diff --git a/loras/opus100/it/adapter_config.json b/loras/opus100/it/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/it/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/it/head_config.json b/loras/opus100/it/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/it/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + 
"LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/it/pytorch_adapter.bin b/loras/opus100/it/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..f116438f0d52299f935b2b97da411346ad082d91 --- /dev/null +++ b/loras/opus100/it/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3badfbffbc21211264110d238b9311951c2c9e3ca036bae9062436081c28b06b +size 5333085 diff --git a/loras/opus100/it/pytorch_model_head.bin b/loras/opus100/it/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..1622a09ebd520d3d07cc974ade5266d0a7907da8 --- /dev/null +++ b/loras/opus100/it/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8808580cb53748dbdc086fa8303a764309e08399ad7848586f58bb0c1a63fce2 +size 342547 diff --git a/loras/opus100/ja/adapter_config.json b/loras/opus100/ja/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/ja/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ja/head_config.json b/loras/opus100/ja/head_config.json new file mode 100644 index 
0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/ja/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ja/pytorch_adapter.bin b/loras/opus100/ja/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..fb672bbae3076284524e7955b981da44a37bf60f --- /dev/null +++ b/loras/opus100/ja/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:336a8fa3d5d0aaae39262934eb941ff4f2ba82496b18291f31bc7546875e9756 +size 5333085 diff --git a/loras/opus100/ja/pytorch_model_head.bin b/loras/opus100/ja/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..324430261578e4fe799cbcd8fa064a3d1adf03eb --- /dev/null +++ b/loras/opus100/ja/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:739a7596abfded748cd432d5c32cd4428ab48094298d4bce491c5eae338ba39d +size 342547 diff --git a/loras/opus100/ka/adapter_config.json b/loras/opus100/ka/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/ka/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + 
"alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ka/head_config.json b/loras/opus100/ka/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/ka/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ka/pytorch_adapter.bin b/loras/opus100/ka/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..6da8b6247a73a2227718a6b5a5ceda1cb182220b --- /dev/null +++ b/loras/opus100/ka/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0df481e5cf4e83f3e371c6870be56c6cfc27fbe7f8cd5f94fc09000e2f1989aa +size 5333085 diff --git a/loras/opus100/ka/pytorch_model_head.bin b/loras/opus100/ka/pytorch_model_head.bin new 
file mode 100644 index 0000000000000000000000000000000000000000..3815b01fd65579b4dc7a83690b9f7af82f655c44 --- /dev/null +++ b/loras/opus100/ka/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a9b3969a0d5c5335f557c4096df745871f7ee88b80fc8cb7f405d2bab2ac4f96 +size 342547 diff --git a/loras/opus100/kk/adapter_config.json b/loras/opus100/kk/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/kk/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/kk/head_config.json b/loras/opus100/kk/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/kk/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": 
"xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/kk/pytorch_adapter.bin b/loras/opus100/kk/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..f746482a1ea847d8d2ec2a45b4d7182dee584751 --- /dev/null +++ b/loras/opus100/kk/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:262c289ac67ddec5b60d5c603865a0ef9f94a6b1d02e0c12352ff58988e26c95 +size 5333085 diff --git a/loras/opus100/kk/pytorch_model_head.bin b/loras/opus100/kk/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..466c1b087a2cb11d4c7ca591df93ad2b6e34dbf1 --- /dev/null +++ b/loras/opus100/kk/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1bdca96225ffadc6871ebe4ffbde13d02d6303e09e5de97ea76bbfcadcaf7998 +size 342547 diff --git a/loras/opus100/km/adapter_config.json b/loras/opus100/km/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/km/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/km/head_config.json b/loras/opus100/km/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/km/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + 
"LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/km/pytorch_adapter.bin b/loras/opus100/km/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..d81d67c779329deada79829aaabd93e5196e310f --- /dev/null +++ b/loras/opus100/km/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5184dbac8b1f6f8b009b19e199271e10b03eacebde412a0af06f9ce5c29c03c6 +size 5333085 diff --git a/loras/opus100/km/pytorch_model_head.bin b/loras/opus100/km/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..15388e1c0964148e7b6f568d4c34212b03f164f8 --- /dev/null +++ b/loras/opus100/km/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0ce3aaaf1be52cd30658cd5194be2922fbf5d6526147e31d00b064e4e3b96bfd +size 342547 diff --git a/loras/opus100/kn/adapter_config.json b/loras/opus100/kn/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/kn/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/kn/head_config.json b/loras/opus100/kn/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/kn/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + 
"LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/kn/pytorch_adapter.bin b/loras/opus100/kn/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..e4565d41202ed10a21ee06e210207f406f27b7d2 --- /dev/null +++ b/loras/opus100/kn/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:364e87202e544c71ebd409a503bbeb3f8619e0f87cdc94f230987f79a0a35379 +size 5333085 diff --git a/loras/opus100/kn/pytorch_model_head.bin b/loras/opus100/kn/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..bb24b8bcedad092d923be81e9e6b4c119960c035 --- /dev/null +++ b/loras/opus100/kn/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:46a4ec215fa07c0bbe80a2b9305d01ce2868a3fe233d5a794bd0b51a006a0ad8 +size 342547 diff --git a/loras/opus100/ko/adapter_config.json b/loras/opus100/ko/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/ko/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ko/head_config.json b/loras/opus100/ko/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/ko/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + 
"LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ko/pytorch_adapter.bin b/loras/opus100/ko/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..0f2832d35eb7ec4255a551d35d26934dd456ebd7 --- /dev/null +++ b/loras/opus100/ko/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f456db126b09b5132656e238e1829ed0295e7adb8053585c089da9d8fe36dd78 +size 5333085 diff --git a/loras/opus100/ko/pytorch_model_head.bin b/loras/opus100/ko/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..e3eec1f27b97d16dc49e1962f7247babea09b36d --- /dev/null +++ b/loras/opus100/ko/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6a05bb743789262b28872622c5e9665289f138be38d9b957ab630b80d8996ed9 +size 342547 diff --git a/loras/opus100/ku/adapter_config.json b/loras/opus100/ku/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/ku/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff 
--git a/loras/opus100/ku/head_config.json b/loras/opus100/ku/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/ku/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ku/pytorch_adapter.bin b/loras/opus100/ku/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..7cb2ce1028b0e0edc6451e1b1f6ce0495e523110 --- /dev/null +++ b/loras/opus100/ku/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6b455bd397b1405728552971ba62e350db2a20e984ab8df6ba6000efc4d63738 +size 5333085 diff --git a/loras/opus100/ku/pytorch_model_head.bin b/loras/opus100/ku/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..d2c7455adaa6c226d8009b58d4c62825a38acc31 --- /dev/null +++ b/loras/opus100/ku/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:36052224cf3b2a5217a47fe1b7ea26204915ff1e4ca99c90666c1f8cd86268cc +size 342547 diff --git a/loras/opus100/ky/adapter_config.json b/loras/opus100/ky/adapter_config.json new file mode 100644 index 
0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/ky/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ky/head_config.json b/loras/opus100/ky/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/ky/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ky/pytorch_adapter.bin b/loras/opus100/ky/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..2ab5c71c1a49c5ea949b490a96a37eef4c6ea1a8 --- /dev/null +++ b/loras/opus100/ky/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:3cfc4b995afd965c7730c560b923faed6db406380b67e6b0830e52647e6c9279 +size 5333085 diff --git a/loras/opus100/ky/pytorch_model_head.bin b/loras/opus100/ky/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..6a2f35f46f0a4b758d111cdfe6942ec4ecd3ed67 --- /dev/null +++ b/loras/opus100/ky/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c9523483a8656aa3a3a9c15060ab017cd396fe6787cb512cc28ae19cf91ea0bd +size 342547 diff --git a/loras/opus100/lt/adapter_config.json b/loras/opus100/lt/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/lt/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/lt/head_config.json b/loras/opus100/lt/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/lt/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + 
"LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/lt/pytorch_adapter.bin b/loras/opus100/lt/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..50c13de93a751cabefa6236a69b283f05f10deca --- /dev/null +++ b/loras/opus100/lt/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5a3faf1a9148a6cc287ad14813b5f95205d997d265b004d65f124d49205daf7d +size 5333085 diff --git a/loras/opus100/lt/pytorch_model_head.bin b/loras/opus100/lt/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..a070f95871619df5fe1f0cdf6e7c3d51de13c05a --- /dev/null +++ b/loras/opus100/lt/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0cb6e90edb1c023291f5ebbb9473ec88f7912b5372720d0101e2390324c347a0 +size 342547 diff --git a/loras/opus100/lv/adapter_config.json b/loras/opus100/lv/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/lv/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/lv/head_config.json b/loras/opus100/lv/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/lv/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + 
"LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/lv/pytorch_adapter.bin b/loras/opus100/lv/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..5faccec7ce8d864d817905a7e091bd0ccee7123b --- /dev/null +++ b/loras/opus100/lv/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8a3663a6638b43d0856a4acc8fd7cf1a6fec8b5a68e05d6c1c021d8da34c52ee +size 5333085 diff --git a/loras/opus100/lv/pytorch_model_head.bin b/loras/opus100/lv/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..995d6321eb8edfa6629c9bbb0ff715d918bc4184 --- /dev/null +++ b/loras/opus100/lv/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a657d5472b64c7dfdffdcc8cb5fb395775e0246dbf0eb9272d22faec40747aef +size 342547 diff --git a/loras/opus100/mg/adapter_config.json b/loras/opus100/mg/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/mg/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/mg/head_config.json b/loras/opus100/mg/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/mg/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + 
"LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/mg/pytorch_adapter.bin b/loras/opus100/mg/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..e4ec5264564a745ac868ee396a57e2bb0967d4b5 --- /dev/null +++ b/loras/opus100/mg/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2d04642fd102abebb7fb35e9fdd7fe7230919e2edd8643276a75ab202da26ecf +size 5333085 diff --git a/loras/opus100/mg/pytorch_model_head.bin b/loras/opus100/mg/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..f83f52de14ea0170d4ad1ea9f5566f4a0dafeb6b --- /dev/null +++ b/loras/opus100/mg/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:565a369e89de7570da9f0339764608a4aab77b403d6f9a795a32df3f8dff9c27 +size 342547 diff --git a/loras/opus100/mk/adapter_config.json b/loras/opus100/mk/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/mk/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/mk/head_config.json b/loras/opus100/mk/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/mk/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + 
"LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/mk/pytorch_adapter.bin b/loras/opus100/mk/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..f67161ff0d990ce195af811154618fbd4776d82f --- /dev/null +++ b/loras/opus100/mk/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a18a0a87474dea87d41eb26b893dd79e3d9c80007904dc856b0a91f7affa7cd1 +size 5333085 diff --git a/loras/opus100/mk/pytorch_model_head.bin b/loras/opus100/mk/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..98946ac390e907ce85ed6b2833f0722b3bca854a --- /dev/null +++ b/loras/opus100/mk/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b02678f7cbddbb3246e4880168a04cf4187743aa80302668e188f45f9ee619a7 +size 342547 diff --git a/loras/opus100/ml/adapter_config.json b/loras/opus100/ml/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/ml/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": 
"SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ml/head_config.json b/loras/opus100/ml/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/ml/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ml/pytorch_adapter.bin b/loras/opus100/ml/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..03c10556660c44e415dd9efd2a14ac0edbb4251d --- /dev/null +++ b/loras/opus100/ml/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:82d7ba9da87bc4fce0b896244ca0b27ec4df4c01778d28dc890c664b83fc6723 +size 5333085 diff --git a/loras/opus100/ml/pytorch_model_head.bin b/loras/opus100/ml/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..a548debe0030bacb36a89b2441fa86ae541a8050 --- /dev/null +++ b/loras/opus100/ml/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8761e6225f597710873f3c621705e5b571bab1a755d79196c2f24919063ee41e +size 342547 diff --git 
a/loras/opus100/mr/adapter_config.json b/loras/opus100/mr/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/mr/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/mr/head_config.json b/loras/opus100/mr/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/mr/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/mr/pytorch_adapter.bin b/loras/opus100/mr/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..e74102d92a43671949442c5da0239105c72f89bc --- /dev/null +++ 
b/loras/opus100/mr/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4ee92cfc7b976db39eb58a6ed46d8f98cf2c27353a973ded01456380893eff5c +size 5333085 diff --git a/loras/opus100/mr/pytorch_model_head.bin b/loras/opus100/mr/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..32b14dc0f518c6acff2db41394e6b69eddf072f2 --- /dev/null +++ b/loras/opus100/mr/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:56b2b2fada9c5f4b60515fffb7700e1ec1e4c26ba94bcd984ae44c3841d9b3e5 +size 342547 diff --git a/loras/opus100/ms/adapter_config.json b/loras/opus100/ms/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/ms/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ms/head_config.json b/loras/opus100/ms/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/ms/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + 
"LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ms/pytorch_adapter.bin b/loras/opus100/ms/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..514f9332e6c6ca9e6ceee3eecc8bdd1db7ca73fc --- /dev/null +++ b/loras/opus100/ms/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c42ac8ed0f3d5377dbfd0928e31bc140ce1e7d51a9d91c6c4920ea7fac5f3692 +size 5333085 diff --git a/loras/opus100/ms/pytorch_model_head.bin b/loras/opus100/ms/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..46bb08eeb488df9395f9673e7c438ff656231aab --- /dev/null +++ b/loras/opus100/ms/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:25b2f8cde1ca6e99f9d223daaa84774c0bfffb0522a6c58bd56a31c35e1ed173 +size 342547 diff --git a/loras/opus100/mt/adapter_config.json b/loras/opus100/mt/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/mt/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/mt/head_config.json b/loras/opus100/mt/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/mt/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, 
+ "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/mt/pytorch_adapter.bin b/loras/opus100/mt/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..469dc084f2a6cd36658d128933f8b6ffdc5b23e3 --- /dev/null +++ b/loras/opus100/mt/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:19e1ed51dc0778a0c2f00d56a9badb41a272fb2826f079ad56abf3ecb14aac0a +size 5333085 diff --git a/loras/opus100/mt/pytorch_model_head.bin b/loras/opus100/mt/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..725a670e0b123639f16d6a0711e65070cb97827a --- /dev/null +++ b/loras/opus100/mt/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:029f9367788f41d3ea0c1c08d32332356ca98efd29b300ff1c800e4291f42892 +size 342547 diff --git a/loras/opus100/my/adapter_config.json b/loras/opus100/my/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/my/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/my/head_config.json b/loras/opus100/my/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/my/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, 
+ "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/my/pytorch_adapter.bin b/loras/opus100/my/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..096177ceff7acdf96306ba90e502f3d8e9f6c8e8 --- /dev/null +++ b/loras/opus100/my/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ec5d0262f97b6ef18f6af60b40088298d6446cb33e19fc363dc99a0a39c30c53 +size 5333085 diff --git a/loras/opus100/my/pytorch_model_head.bin b/loras/opus100/my/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..4bf06284b90a988249cfce75bbbc0f0070f02db2 --- /dev/null +++ b/loras/opus100/my/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d7bcd1119dc25daa8706d98bf79bc69c61865a74288f00a668a7fa692242d687 +size 342547 diff --git a/loras/opus100/ne/adapter_config.json b/loras/opus100/ne/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/ne/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ne/head_config.json b/loras/opus100/ne/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/ne/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + 
"LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ne/pytorch_adapter.bin b/loras/opus100/ne/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..62ca648ffd4d2fb5bb4036695efce396a8fe4dfc --- /dev/null +++ b/loras/opus100/ne/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e0c6a866e6eda6aa69fa1a15d6afc88e026e5dd7c58ed98523810f4f94bf7f2b +size 5333085 diff --git a/loras/opus100/ne/pytorch_model_head.bin b/loras/opus100/ne/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..4ea563bb70a4232b6fd9bf9c6f3b8021c639b019 --- /dev/null +++ b/loras/opus100/ne/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:12c54de8b493b68a5341b07587176f9cf4f80765b0f89d16585f0970fb6257ec +size 342547 diff --git a/loras/opus100/no/adapter_config.json b/loras/opus100/no/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/no/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, 
+ "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/no/head_config.json b/loras/opus100/no/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/no/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/no/pytorch_adapter.bin b/loras/opus100/no/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..cbae3157c22488b92dead7dc9f64d433cfa8e46f --- /dev/null +++ b/loras/opus100/no/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:654869067830d89122ffefe4a052b0352b93489992b50f47b1d8caaaec784d0e +size 5333085 diff --git a/loras/opus100/no/pytorch_model_head.bin b/loras/opus100/no/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..8399f72c9a4f21e1e200b6c2b1ac20a7fdf48a92 --- /dev/null +++ b/loras/opus100/no/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:8e7e38f9ce32a95422a086964d7c91bc3ac4f64de258c252463185a310da7879 +size 342547 diff --git a/loras/opus100/pa/adapter_config.json b/loras/opus100/pa/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/pa/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/pa/head_config.json b/loras/opus100/pa/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/pa/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/pa/pytorch_adapter.bin b/loras/opus100/pa/pytorch_adapter.bin 
new file mode 100644 index 0000000000000000000000000000000000000000..7074e600ca3add5a8f0e9e6e9bfc4c5ae9c71fa5 --- /dev/null +++ b/loras/opus100/pa/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:609709f370464bcc27eb0aea5bb6ecf364b92112b330e25e5ec2594f0f0eedae +size 5333085 diff --git a/loras/opus100/pa/pytorch_model_head.bin b/loras/opus100/pa/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..a330a4b976730a114d9036de274000540d81574c --- /dev/null +++ b/loras/opus100/pa/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b7e55b912ce618d2b009a08d84435e9abafd8e07b127a558024074bc98363e6a +size 342547 diff --git a/loras/opus100/pl/adapter_config.json b/loras/opus100/pl/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/pl/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/pl/head_config.json b/loras/opus100/pl/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/pl/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + 
"LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/pl/pytorch_adapter.bin b/loras/opus100/pl/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..2866cce8d5dde3c57b55f1396fa5fafbe66fff29 --- /dev/null +++ b/loras/opus100/pl/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:415ea4b737974e30632b7baaba30c3faa9ee3d4e3cc55ea8a377ceb1f49679c5 +size 5333085 diff --git a/loras/opus100/pl/pytorch_model_head.bin b/loras/opus100/pl/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..7e5eaf0f74ed67cb34801ca82715bb1d779455c3 --- /dev/null +++ b/loras/opus100/pl/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:36b1872d9ef5b17d02cb0bde04b0bff6e47fc2ce25090cab74f2f843c78f53b3 +size 342547 diff --git a/loras/opus100/ps/adapter_config.json b/loras/opus100/ps/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/ps/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ps/head_config.json b/loras/opus100/ps/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/ps/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + 
"LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ps/pytorch_adapter.bin b/loras/opus100/ps/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..10ccfddad6188b4382bf1902ecf326a520e4b00c --- /dev/null +++ b/loras/opus100/ps/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ed3eaaa6515e2911373dbe3c15bd935b675296ca456496e9666f70a0a0b42d9e +size 5333085 diff --git a/loras/opus100/ps/pytorch_model_head.bin b/loras/opus100/ps/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..7c87e4c539921c394517e138af8a33b210ce092b --- /dev/null +++ b/loras/opus100/ps/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:974d78b2e674d1a984e45daffdd6394e65930360d3ed4c9363c669ec9e554b2a +size 342547 diff --git a/loras/opus100/pt/adapter_config.json b/loras/opus100/pt/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/pt/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/pt/head_config.json b/loras/opus100/pt/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/pt/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + 
"LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/pt/pytorch_adapter.bin b/loras/opus100/pt/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..3c5af4cce919e6483cef669c36eee54f40d0e5dd --- /dev/null +++ b/loras/opus100/pt/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5626d4c5af38ebe036e018a58bcf98901ccdc28ae836edb7242d687ca1cb109a +size 5333085 diff --git a/loras/opus100/pt/pytorch_model_head.bin b/loras/opus100/pt/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..b5c126cb8cd23a8d21ea08846f100287e5a6fb0c --- /dev/null +++ b/loras/opus100/pt/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b194bf5efef06a57296de47aa490e1366df458127464ced36b6cc363f4382b9f +size 342547 diff --git a/loras/opus100/ro/adapter_config.json b/loras/opus100/ro/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/ro/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ro/head_config.json b/loras/opus100/ro/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- 
/dev/null +++ b/loras/opus100/ro/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ro/pytorch_adapter.bin b/loras/opus100/ro/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..ee4b843051f096ce26a471792f0171c5e65125dc --- /dev/null +++ b/loras/opus100/ro/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e21b9683839f50872af82b2d5fcb59b8c77b8630a121656e3c312eca40088eb2 +size 5333085 diff --git a/loras/opus100/ro/pytorch_model_head.bin b/loras/opus100/ro/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..41ab2ed29f07422049f2d5c314c093ff85115d79 --- /dev/null +++ b/loras/opus100/ro/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5dfaa56c88740b61fd9097cf56a66398c45e469c58bacb85bb1ce9a9f8cbfb04 +size 342547 diff --git a/loras/opus100/si/adapter_config.json b/loras/opus100/si/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/si/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + 
"composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/si/head_config.json b/loras/opus100/si/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/si/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/si/pytorch_adapter.bin b/loras/opus100/si/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..debe418a5cfc4c432fa4a097ab7b1ded9a7aa805 --- /dev/null +++ b/loras/opus100/si/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:31c3623f4668ae249f4aa7a27146079431747782f728a276cbafa91b7265a1ec +size 5333085 diff --git a/loras/opus100/si/pytorch_model_head.bin b/loras/opus100/si/pytorch_model_head.bin new file mode 100644 index 
0000000000000000000000000000000000000000..6ba7f51142b4d62adb319b2e5e55989628a7c6ab --- /dev/null +++ b/loras/opus100/si/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5f5c5556cad414a764bb699fafe274b1b060cffdeda228bdd866dd3d2ce5214a +size 342547 diff --git a/loras/opus100/sk/adapter_config.json b/loras/opus100/sk/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/sk/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/sk/head_config.json b/loras/opus100/sk/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/sk/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": 
null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/sk/pytorch_adapter.bin b/loras/opus100/sk/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..b8aae2dea6bc061dc2d0946465ca56c513a845cf --- /dev/null +++ b/loras/opus100/sk/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:78b78c0710340abddb430badf54d51c786c366edc0970de65cc42739a4ce3120 +size 5333085 diff --git a/loras/opus100/sk/pytorch_model_head.bin b/loras/opus100/sk/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..2d56e2398ae85a99bfb2a3bed7273e1c0f455ead --- /dev/null +++ b/loras/opus100/sk/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:00f2dc6feaeb8ed6ebd60e3293b772f5bb2d289bca477f56eb0a3fc5aa01079a +size 342547 diff --git a/loras/opus100/sl/adapter_config.json b/loras/opus100/sl/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/sl/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/sl/head_config.json b/loras/opus100/sl/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/sl/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 
75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/sl/pytorch_adapter.bin b/loras/opus100/sl/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..673210496f1ff973a17625d1ff9d45b0de166b05 --- /dev/null +++ b/loras/opus100/sl/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b236d56234ecad475968c7e1501deffa422ac1d7a0d49ebf84be0c5f9098fcc7 +size 5333085 diff --git a/loras/opus100/sl/pytorch_model_head.bin b/loras/opus100/sl/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..248be4c53cc52db7c6c1bbf25e16308597ba322f --- /dev/null +++ b/loras/opus100/sl/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b5dd70ec039929dcba131da53f71552712bf17376d37f6fe5ec9df37cf191b59 +size 342547 diff --git a/loras/opus100/sq/adapter_config.json b/loras/opus100/sq/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/sq/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/sq/head_config.json b/loras/opus100/sq/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/sq/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 
44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/sq/pytorch_adapter.bin b/loras/opus100/sq/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..a82f9c358300dac148b7fad55d13a858f7d405e3 --- /dev/null +++ b/loras/opus100/sq/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bca70e969399e9220f0d53ea43c789ba537e7e8b76225284069b4771f1580b35 +size 5333085 diff --git a/loras/opus100/sq/pytorch_model_head.bin b/loras/opus100/sq/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..315204d1ffbf2ea21077e76db1af6f7824f3efe3 --- /dev/null +++ b/loras/opus100/sq/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7f6abcaa393c8afa672e83d57a3dca3e8cba81ddacad9f55388618dd8623172f +size 342547 diff --git a/loras/opus100/sr/adapter_config.json b/loras/opus100/sr/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/sr/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/sr/head_config.json b/loras/opus100/sr/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/sr/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 
13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/sr/pytorch_adapter.bin b/loras/opus100/sr/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..c7f66f4e3bf081f34492cb91bf4cb59d623c6d86 --- /dev/null +++ b/loras/opus100/sr/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d804dd63b65bd427a377e95d3539efd874043e52c490ab01c51d6d568dcb6406 +size 5333085 diff --git a/loras/opus100/sr/pytorch_model_head.bin b/loras/opus100/sr/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..4957f63aaea744ab0b2e301bcc152430e552c220 --- /dev/null +++ b/loras/opus100/sr/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d88980f5c9872dfcafbfc94ff1c39da6836a40adfabdbca7d66afddeac7e6890 +size 342547 diff --git a/loras/opus100/sv/adapter_config.json b/loras/opus100/sv/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/sv/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git 
a/loras/opus100/sv/head_config.json b/loras/opus100/sv/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/sv/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/sv/pytorch_adapter.bin b/loras/opus100/sv/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..c7f96d5f7ecfc2fd8f409e676bfb6851b0af2bcc --- /dev/null +++ b/loras/opus100/sv/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5cb8f33b4b8d4d74a9e594f8a492ebc88ba4d32d8ac60d15bc4aea4305dfc6b2 +size 5333085 diff --git a/loras/opus100/sv/pytorch_model_head.bin b/loras/opus100/sv/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..e61219af324a1f6e9eecd0d90db78bea61ca19ae --- /dev/null +++ b/loras/opus100/sv/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cbfb5bb071ebeed8e1405b6c7ed54b5cd1d8537c5ca496563ecfb49e3b63d45f +size 342547 diff --git a/loras/opus100/ta/adapter_config.json b/loras/opus100/ta/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 
--- /dev/null +++ b/loras/opus100/ta/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ta/head_config.json b/loras/opus100/ta/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/ta/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ta/pytorch_adapter.bin b/loras/opus100/ta/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..ee5d37a4f24c59fd8544935c3c406a4217f257e0 --- /dev/null +++ b/loras/opus100/ta/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:696aa6c461a37a537ede04c2c94b263762ff9bb762b63a3c5342dea56eb9ee0a +size 5333085 diff 
--git a/loras/opus100/ta/pytorch_model_head.bin b/loras/opus100/ta/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..e9a04acb7c7650aaaf1fdefb1836420e19075600 --- /dev/null +++ b/loras/opus100/ta/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:be0fe8ed6d61b1a1f74f88a2936df7c9b52e619eec09ee4091222e60be3de2f8 +size 342547 diff --git a/loras/opus100/te/adapter_config.json b/loras/opus100/te/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/te/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/te/head_config.json b/loras/opus100/te/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/te/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": 
"SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/te/pytorch_adapter.bin b/loras/opus100/te/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..828eb02ba382781c007c7a454ddc70cc4c64ed36 --- /dev/null +++ b/loras/opus100/te/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:65649f22b257412746f480fa410cc9b4afa1ef7cd6e3bd59244f69ad5596a275 +size 5333085 diff --git a/loras/opus100/te/pytorch_model_head.bin b/loras/opus100/te/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..ca134bf34ef0bf726f7f4176e00059d6d1e73af3 --- /dev/null +++ b/loras/opus100/te/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bb729805b2ce71f4da97481d3f6845768df8b75628ec4f0b3c2c9d476bfb62dc +size 342547 diff --git a/loras/opus100/tg/adapter_config.json b/loras/opus100/tg/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/tg/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/tg/head_config.json b/loras/opus100/tg/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/tg/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + 
"LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/tg/pytorch_adapter.bin b/loras/opus100/tg/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..200900eebb0968fb4911b73ac30b31dd79522747 --- /dev/null +++ b/loras/opus100/tg/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f52e4feb2be4199acca5d88ae519cdc87165746c878c39a65b11d28d2cecfe39 +size 5333085 diff --git a/loras/opus100/tg/pytorch_model_head.bin b/loras/opus100/tg/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..e9145df10d0d1709c74124d3a4e1817c058bbcc1 --- /dev/null +++ b/loras/opus100/tg/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5c0f9f3fb94338cc24842ac15371c8841719283183aa161a30a045678be0307d +size 342547 diff --git a/loras/opus100/th/adapter_config.json b/loras/opus100/th/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/th/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/th/head_config.json b/loras/opus100/th/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/th/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + 
"LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/th/pytorch_adapter.bin b/loras/opus100/th/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..2515021a0cb52a4facbfd98b89c442108e8c9697 --- /dev/null +++ b/loras/opus100/th/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5da0b0915eae734dbc11c696660b40cb17e88d1017cafed8978ef93ed69cfa9d +size 5333085 diff --git a/loras/opus100/th/pytorch_model_head.bin b/loras/opus100/th/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..025afc50ea7421750659b3d4455bab8817a37267 --- /dev/null +++ b/loras/opus100/th/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1828eeb5f6340bf54aa31b28807c62c26571c85f323bb373432c1a3ea36f5210 +size 342547 diff --git a/loras/opus100/tr/adapter_config.json b/loras/opus100/tr/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/tr/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/tr/head_config.json b/loras/opus100/tr/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/tr/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + 
"LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/tr/pytorch_adapter.bin b/loras/opus100/tr/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..5bf3919bf4ada10a5b2a8a10686d95b60cd91dc3 --- /dev/null +++ b/loras/opus100/tr/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a0c12336bff4b4a740ddfa8e0979b6964410e6a76f4ae30e89afab7d3435b300 +size 5333085 diff --git a/loras/opus100/tr/pytorch_model_head.bin b/loras/opus100/tr/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..7ddc448e281896289510db22fdc799ab16f0746c --- /dev/null +++ b/loras/opus100/tr/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:817e648af0dfbbaf1341e9920495a86a19ffc7f244dc4f413cf0440d791d8001 +size 342547 diff --git a/loras/opus100/uk/adapter_config.json b/loras/opus100/uk/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/uk/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + 
"model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/uk/head_config.json b/loras/opus100/uk/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/uk/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/uk/pytorch_adapter.bin b/loras/opus100/uk/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..f725a7edb538ea0ba3344de70df0b21b8c87cb59 --- /dev/null +++ b/loras/opus100/uk/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c9c968179c1e9bba95260e40140a965b7193e2c9c8340d95ab1668b8a88192f2 +size 5333085 diff --git a/loras/opus100/uk/pytorch_model_head.bin b/loras/opus100/uk/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..5be27075321e2a7bbd00b85f4a4a76a9e9f7a811 --- /dev/null +++ b/loras/opus100/uk/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:103eb7081769d7f78825dac874ecdde19a733b1f2d0ee808b6546ac46794c15e +size 342547 diff --git a/loras/opus100/ur/adapter_config.json b/loras/opus100/ur/adapter_config.json new 
file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/ur/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ur/head_config.json b/loras/opus100/ur/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/ur/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/ur/pytorch_adapter.bin b/loras/opus100/ur/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..54e51e12d2fb585a38bb255559b39eb562202cd9 --- /dev/null +++ b/loras/opus100/ur/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:8f5b6bab611ea56c2afc527fb1e1bdef08eb808b5a792ec5a24d74fe0f9754bb +size 5333085 diff --git a/loras/opus100/ur/pytorch_model_head.bin b/loras/opus100/ur/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..5dce8fc77015c84cc187d6e70f7256ee44f9aecc --- /dev/null +++ b/loras/opus100/ur/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4a59343357cc3c7417eb2a0c4ce1385501899cf5493f439f975d6435b03f2c75 +size 342547 diff --git a/loras/opus100/uz/adapter_config.json b/loras/opus100/uz/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/uz/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/uz/head_config.json b/loras/opus100/uz/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/uz/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + 
"LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/uz/pytorch_adapter.bin b/loras/opus100/uz/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..9a445ca5bfd06e7ed86f9377e0832801a71f9fb4 --- /dev/null +++ b/loras/opus100/uz/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5fec92637ff03369814c04a03ef7fee675292a0026c399e1c508d2c63866fcd7 +size 5333085 diff --git a/loras/opus100/uz/pytorch_model_head.bin b/loras/opus100/uz/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..1edf0ab94531781da84834dfc1e7bc7694fc5a3c --- /dev/null +++ b/loras/opus100/uz/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2a825adb97c1f6b1d2091e56bb6dd54fb1b927793b924b4ab25f30c7de7fe867 +size 342547 diff --git a/loras/opus100/vi/adapter_config.json b/loras/opus100/vi/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/vi/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/vi/head_config.json b/loras/opus100/vi/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/vi/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + 
"LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/vi/pytorch_adapter.bin b/loras/opus100/vi/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..b7f05626c0d409033984c61ed5b7aaa90cbe99dc --- /dev/null +++ b/loras/opus100/vi/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:98876fc1d8811f9ac1259c07f355a51e78ac9d9a56a3b3d513ea8a55f8cf02bd +size 5333085 diff --git a/loras/opus100/vi/pytorch_model_head.bin b/loras/opus100/vi/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..4911e7039174b249c0a23549a85fc7fbad60c08d --- /dev/null +++ b/loras/opus100/vi/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a4d0db9b34190fbaf5f760a717456de1f4cf3591522185638be50981f880e823 +size 342547 diff --git a/loras/opus100/xh/adapter_config.json b/loras/opus100/xh/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/xh/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/xh/head_config.json b/loras/opus100/xh/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/xh/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + 
"LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/xh/pytorch_adapter.bin b/loras/opus100/xh/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..5d599cb032c270534d35a94cb5353e0ed16bc879 --- /dev/null +++ b/loras/opus100/xh/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:749dd18becc48f1abff605c7a2eb1fb0f3684defa92174f03aed86a2aed02885 +size 5333085 diff --git a/loras/opus100/xh/pytorch_model_head.bin b/loras/opus100/xh/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..b8e40a7cee7b4597b8d07b85f9b653ae199e9e69 --- /dev/null +++ b/loras/opus100/xh/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d4f4da1a4907785b66aed11482751f8c35d4bca3e302bdcb1a8e17efb237507d +size 342547 diff --git a/loras/opus100/yi/adapter_config.json b/loras/opus100/yi/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/yi/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/yi/head_config.json b/loras/opus100/yi/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/yi/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + 
"LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/yi/pytorch_adapter.bin b/loras/opus100/yi/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..89c29e7b33990d45a909982f0990377e67ea9ab1 --- /dev/null +++ b/loras/opus100/yi/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:63b1d99e93ce72044f8f0fa61e3bc36e5331eb24e660b138cb72c8fa1b1e1e25 +size 5333085 diff --git a/loras/opus100/yi/pytorch_model_head.bin b/loras/opus100/yi/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..ac8c25f3b62a04c94bff62b98ef55bddd323a736 --- /dev/null +++ b/loras/opus100/yi/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:140fc933ebdb52073f486fe07da20c8eb8f6e242210b3cc9b9811068d04eccce +size 342547 diff --git a/loras/opus100/zh/adapter_config.json b/loras/opus100/zh/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/zh/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + 
"hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/zh/head_config.json b/loras/opus100/zh/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/zh/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/zh/pytorch_adapter.bin b/loras/opus100/zh/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..a2926eeeb767b93b0fc30acda28c0d52a84a1c30 --- /dev/null +++ b/loras/opus100/zh/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:33997998a722aa679fbbcd8b445a221bc2ca0bba90ec2609103e1f60ab50b49f +size 5333085 diff --git a/loras/opus100/zh/pytorch_model_head.bin b/loras/opus100/zh/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..d99694ef223eb0465ed428ecbe54d89410b061e9 --- /dev/null +++ b/loras/opus100/zh/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:004c5dec3d41584265847212c315bf95b488de369f4e919e923087e23b80b772 +size 342547 diff --git a/loras/opus100/zu/adapter_config.json b/loras/opus100/zu/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/opus100/zu/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/zu/head_config.json b/loras/opus100/zu/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/opus100/zu/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/opus100/zu/pytorch_adapter.bin b/loras/opus100/zu/pytorch_adapter.bin new file mode 100644 index 
0000000000000000000000000000000000000000..ce70272f857e573bee6aa9190f92748910c0fe81 --- /dev/null +++ b/loras/opus100/zu/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6af6fe47f4c89e47a65129851bebe8c97cf88d5dcf93cbf0435239fca010048a +size 5333085 diff --git a/loras/opus100/zu/pytorch_model_head.bin b/loras/opus100/zu/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..028f62f9f257458b5517a1a8539d20c749c3cee8 --- /dev/null +++ b/loras/opus100/zu/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:df564e8f74e3f9d0c4046c9ea47a40f963e31391b3193160df8e07653c300a2f +size 342547 diff --git a/loras/ted2020-corrupted/af/adapter_config.json b/loras/ted2020-corrupted/af/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/af/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/af/head_config.json b/loras/ted2020-corrupted/af/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/af/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + 
"LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/af/pytorch_adapter.bin b/loras/ted2020-corrupted/af/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..3bcb6ec7b5cafdffe1a6f738783fd63985b0310a --- /dev/null +++ b/loras/ted2020-corrupted/af/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:da52aef5e641d379e049974510dbe5c63eb9d7febb873dddea3dada79dba1237 +size 5333085 diff --git a/loras/ted2020-corrupted/af/pytorch_model_head.bin b/loras/ted2020-corrupted/af/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..f34f280c95bfbe96faf337862265e887745c6fc3 --- /dev/null +++ b/loras/ted2020-corrupted/af/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e5aa7cd9a8df38286bb6645132f4156e1cbfc9ca81a7708e0ed4e5d46a89066b +size 342547 diff --git a/loras/ted2020-corrupted/am/adapter_config.json b/loras/ted2020-corrupted/am/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/am/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/am/head_config.json b/loras/ted2020-corrupted/am/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/am/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + 
"LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/am/pytorch_adapter.bin b/loras/ted2020-corrupted/am/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..0ba65fd92e51d2f9b1de15f5b7331510fe2709fc --- /dev/null +++ b/loras/ted2020-corrupted/am/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3484623ea43d0e4b4d039d2c18da003b55d5f8b9d618cc648321c1e8ab9c7dfc +size 5333085 diff --git a/loras/ted2020-corrupted/am/pytorch_model_head.bin b/loras/ted2020-corrupted/am/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..886a8289bcab9e4d5163ef7c521f088626839e57 --- /dev/null +++ b/loras/ted2020-corrupted/am/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:064a9f08edcb06b64d48b902659340bcea13d9f7c4c86e100640a8233175b5e8 +size 342547 diff --git a/loras/ted2020-corrupted/ar/adapter_config.json b/loras/ted2020-corrupted/ar/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/ar/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ar/head_config.json b/loras/ted2020-corrupted/ar/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/ar/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 
107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ar/pytorch_adapter.bin b/loras/ted2020-corrupted/ar/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..404e3c1ec6c1e4245c6ce85b3d9949876f16ae7c --- /dev/null +++ b/loras/ted2020-corrupted/ar/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f0a6d7e30df26de54039305925c1c5b1608793a8e2849c7bb08428d426c157ae +size 5333085 diff --git a/loras/ted2020-corrupted/ar/pytorch_model_head.bin b/loras/ted2020-corrupted/ar/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..f6decc989e997ea37424d9a1b75d1e6e9bfe4289 --- /dev/null +++ b/loras/ted2020-corrupted/ar/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:da79e6495b76823eddd2b8eb9e18e7cba19575202fa059885f49f971b87e90e5 +size 342547 diff --git a/loras/ted2020-corrupted/az/adapter_config.json b/loras/ted2020-corrupted/az/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/az/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": 
"SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/az/head_config.json b/loras/ted2020-corrupted/az/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/az/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/az/pytorch_adapter.bin b/loras/ted2020-corrupted/az/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..aade4fa8c054b3d0fad5fa1a483b7567bd860ef9 --- /dev/null +++ b/loras/ted2020-corrupted/az/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:06c22e81a08dcb440d642ca606a2d6f282bcfe902554cfd7b2cf41db685e4ca7 +size 5333085 diff --git a/loras/ted2020-corrupted/az/pytorch_model_head.bin b/loras/ted2020-corrupted/az/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..2b7e694fc04ca74197939e3ce072f67e95c352e9 --- /dev/null +++ b/loras/ted2020-corrupted/az/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:56722f6fbf16e93d912b58fe9094948f9aa347e20d618de7b81af25a4cb8fe52 +size 342547 diff --git a/loras/ted2020-corrupted/be/adapter_config.json b/loras/ted2020-corrupted/be/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/be/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/be/head_config.json b/loras/ted2020-corrupted/be/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/be/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/be/pytorch_adapter.bin 
b/loras/ted2020-corrupted/be/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..556d3f03b5965d80f0b484578d97a2efc05e7dd0 --- /dev/null +++ b/loras/ted2020-corrupted/be/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ecb13f0c3e87eaafebb7e707ddef2f588273021ba4be5a97139f9b35f50cfad6 +size 5333085 diff --git a/loras/ted2020-corrupted/be/pytorch_model_head.bin b/loras/ted2020-corrupted/be/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..ca6b36e60c22d1d8ef4137af46171316ba846a17 --- /dev/null +++ b/loras/ted2020-corrupted/be/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a7e2c0cfa69ed2248c5e75a180f6dd6eeff6381832f9aad9f703e80552573228 +size 342547 diff --git a/loras/ted2020-corrupted/bg/adapter_config.json b/loras/ted2020-corrupted/bg/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/bg/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/bg/head_config.json b/loras/ted2020-corrupted/bg/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/bg/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 
76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/bg/pytorch_adapter.bin b/loras/ted2020-corrupted/bg/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..86cff697ab5b192c6252bde0572b13b4a74b07af --- /dev/null +++ b/loras/ted2020-corrupted/bg/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d750168e5e379dad58ae10c590eb682948bf7d173741c4c1bf2a10bcbe06ae02 +size 5333085 diff --git a/loras/ted2020-corrupted/bg/pytorch_model_head.bin b/loras/ted2020-corrupted/bg/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..b23614378f42596740cefb704fcec2273f8a6cad --- /dev/null +++ b/loras/ted2020-corrupted/bg/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7d050b55d34a330025431938e7794546e1681bb2f535b6f9b6f74bcf6cba0fa5 +size 342547 diff --git a/loras/ted2020-corrupted/bn/adapter_config.json b/loras/ted2020-corrupted/bn/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/bn/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/bn/head_config.json b/loras/ted2020-corrupted/bn/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/bn/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, 
+ "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/bn/pytorch_adapter.bin b/loras/ted2020-corrupted/bn/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..90bc0af64854a9a7816f82c909e943f91bc8a441 --- /dev/null +++ b/loras/ted2020-corrupted/bn/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0c41dfd7d0d031fa6d994190900f8709d31ac0762d817f120a9066a0dda0b101 +size 5333085 diff --git a/loras/ted2020-corrupted/bn/pytorch_model_head.bin b/loras/ted2020-corrupted/bn/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..53327213bba1b36ac2a627ab1e1e552421b63c1e --- /dev/null +++ b/loras/ted2020-corrupted/bn/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ba56d365e86a17de382e596e974cb5e6ba9d795d3fad7ebd49a0b84752ef7e36 +size 342547 diff --git a/loras/ted2020-corrupted/ca/adapter_config.json b/loras/ted2020-corrupted/ca/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/ca/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ca/head_config.json b/loras/ted2020-corrupted/ca/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/ca/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + 
"LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ca/pytorch_adapter.bin b/loras/ted2020-corrupted/ca/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..7c35c52a63d813e1f7b858cd18532c35c7165abb --- /dev/null +++ b/loras/ted2020-corrupted/ca/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7e655cd53f6305760961aacbb897d35981f953288c62f7c3d376fb31c8eac89c +size 5333085 diff --git a/loras/ted2020-corrupted/ca/pytorch_model_head.bin b/loras/ted2020-corrupted/ca/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..c0a09907463a12c500c4dd4287f5dae7366a17f3 --- /dev/null +++ b/loras/ted2020-corrupted/ca/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1faae35aa901a694d36adb6390ad54cea8cbaca99234b7ee311a5649e2ecb6bc +size 342547 diff --git a/loras/ted2020-corrupted/ceb/adapter_config.json b/loras/ted2020-corrupted/ceb/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/ceb/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": 
false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ceb/head_config.json b/loras/ted2020-corrupted/ceb/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/ceb/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ceb/pytorch_adapter.bin b/loras/ted2020-corrupted/ceb/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..ba57fa99bc22a960cf9ad8d034c008dd702c7939 --- /dev/null +++ b/loras/ted2020-corrupted/ceb/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:17d8ae3cc7d633dc16101882ba94f73c5793603fac545098a394f2234cb796aa +size 5333085 diff --git a/loras/ted2020-corrupted/ceb/pytorch_model_head.bin b/loras/ted2020-corrupted/ceb/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..339b0366fdb77461049f6a48968d54b0d0d96bd1 --- /dev/null +++ 
b/loras/ted2020-corrupted/ceb/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8bf32ce47f26441e4732932a863c24c1760680cb62d4a40c314407e7196a9963 +size 342547 diff --git a/loras/ted2020-corrupted/cs/adapter_config.json b/loras/ted2020-corrupted/cs/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/cs/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/cs/head_config.json b/loras/ted2020-corrupted/cs/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/cs/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + 
"version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/cs/pytorch_adapter.bin b/loras/ted2020-corrupted/cs/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..46752062b699f7229265e06e9dff5876d4fa482e --- /dev/null +++ b/loras/ted2020-corrupted/cs/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5a02accd87b3ac9933f181f0b888f109f68c409ea3171b9d8450981ba8672543 +size 5333085 diff --git a/loras/ted2020-corrupted/cs/pytorch_model_head.bin b/loras/ted2020-corrupted/cs/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..a1cdb42cd7a74a635bcc06bea4d233620a30d825 --- /dev/null +++ b/loras/ted2020-corrupted/cs/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d0dbae486762d1bc14ed455bfc512e04efdf7e2334dd0b082ef2cf0ae28363ff +size 342547 diff --git a/loras/ted2020-corrupted/da/adapter_config.json b/loras/ted2020-corrupted/da/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/da/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/da/head_config.json b/loras/ted2020-corrupted/da/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/da/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 
70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/da/pytorch_adapter.bin b/loras/ted2020-corrupted/da/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..de77d9607ff7b9670d53b5d1e7dc94da0b650711 --- /dev/null +++ b/loras/ted2020-corrupted/da/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ef5b4720ecd75eaff235e474225a0dba817a927cf5af5d53c71563b6cc701c25 +size 5333085 diff --git a/loras/ted2020-corrupted/da/pytorch_model_head.bin b/loras/ted2020-corrupted/da/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..741a397737f906b9cde0e9eaa6ebee722e7be4d6 --- /dev/null +++ b/loras/ted2020-corrupted/da/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9559169dac497c5ef87b438f6044facefc0f47494c3818ee31b114bb8d7b9195 +size 342547 diff --git a/loras/ted2020-corrupted/de/adapter_config.json b/loras/ted2020-corrupted/de/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/de/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/de/head_config.json b/loras/ted2020-corrupted/de/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/de/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, 
+ "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/de/pytorch_adapter.bin b/loras/ted2020-corrupted/de/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..4ed2e48768015b17baf81e840ff1a1a8106d6166 --- /dev/null +++ b/loras/ted2020-corrupted/de/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:374d7740d28b528e63b67f5b8762078f5eb30a2ee967c2717c98c760bac590e1 +size 5333085 diff --git a/loras/ted2020-corrupted/de/pytorch_model_head.bin b/loras/ted2020-corrupted/de/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..6b375cd21c2e87a468fb0992f4097ed30b51aefa --- /dev/null +++ b/loras/ted2020-corrupted/de/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4ecdd0f2653c5fdd172ef51be7a3cc6cdc971faa8fdb0278c6a8648157688256 +size 342547 diff --git a/loras/ted2020-corrupted/el/adapter_config.json b/loras/ted2020-corrupted/el/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/el/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/el/head_config.json b/loras/ted2020-corrupted/el/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/el/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + 
"label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/el/pytorch_adapter.bin b/loras/ted2020-corrupted/el/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..7ebc2dbffae947040f5a04822a0c7948b3d2b074 --- /dev/null +++ b/loras/ted2020-corrupted/el/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ebc003acad3de6265dfaa0dfedc5f379d6d79d6205e06b9888629fd4a0497779 +size 5333085 diff --git a/loras/ted2020-corrupted/el/pytorch_model_head.bin b/loras/ted2020-corrupted/el/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..de599304c0471a5cb1ab3560f00d5bb4b757fea5 --- /dev/null +++ b/loras/ted2020-corrupted/el/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:349a571771426e968c5dcd0d4eaecad434b520180b14035ec4bab575011e4076 +size 342547 diff --git a/loras/ted2020-corrupted/en/adapter_config.json b/loras/ted2020-corrupted/en/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/en/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + 
"dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/en/head_config.json b/loras/ted2020-corrupted/en/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/en/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/en/pytorch_adapter.bin b/loras/ted2020-corrupted/en/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..cfa2e4302aa2fd74e2eddfc53ef20c85379ae09e --- /dev/null +++ b/loras/ted2020-corrupted/en/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b072f4e3cb7e3cc7ba53dcbf37c70158219256703a2fc037aa95d8ba5494ab0e +size 5333085 diff --git a/loras/ted2020-corrupted/en/pytorch_model_head.bin b/loras/ted2020-corrupted/en/pytorch_model_head.bin new file mode 100644 index 
0000000000000000000000000000000000000000..8553e25e258cba82c5ba7613c33288c18d6f68c1 --- /dev/null +++ b/loras/ted2020-corrupted/en/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2393327bbba4ed71763d566d0fc8d26dd1fad73018e803a6f425a56d7020a7a6 +size 342547 diff --git a/loras/ted2020-corrupted/eo/adapter_config.json b/loras/ted2020-corrupted/eo/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/eo/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/eo/head_config.json b/loras/ted2020-corrupted/eo/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/eo/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + 
"model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/eo/pytorch_adapter.bin b/loras/ted2020-corrupted/eo/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..f4c66c5cf4e3e7ae409071725b1c8638cd0aac48 --- /dev/null +++ b/loras/ted2020-corrupted/eo/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7cadb65186bdfc822381ebd2368f42823060d72031e8a5d9ab4548224844d59a +size 5333085 diff --git a/loras/ted2020-corrupted/eo/pytorch_model_head.bin b/loras/ted2020-corrupted/eo/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..327b4e7ef5a0dc36cbb8364aa8aec418177f5f92 --- /dev/null +++ b/loras/ted2020-corrupted/eo/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:97f803026b4f1c2275b3af534a61e0c192b5a8c53a4f57ff474211aded58a4d9 +size 342547 diff --git a/loras/ted2020-corrupted/es/adapter_config.json b/loras/ted2020-corrupted/es/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/es/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/es/head_config.json b/loras/ted2020-corrupted/es/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/es/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + 
"LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/es/pytorch_adapter.bin b/loras/ted2020-corrupted/es/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..9782cc4722b5c8c99d3f823452790124149d3baf --- /dev/null +++ b/loras/ted2020-corrupted/es/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1fc8cc84347535add10cf024719b9cadbf4acd2a3733edac54762dd194baed6e +size 5333085 diff --git a/loras/ted2020-corrupted/es/pytorch_model_head.bin b/loras/ted2020-corrupted/es/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..bcc528d93423b22dd9e9f5f5ec84cb0eb4614c02 --- /dev/null +++ b/loras/ted2020-corrupted/es/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b938951d2546bc37badcfaff365e0fd585f2800c6a69c39c90b39949300fdbe9 +size 342547 diff --git a/loras/ted2020-corrupted/et/adapter_config.json b/loras/ted2020-corrupted/et/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/et/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/et/head_config.json b/loras/ted2020-corrupted/et/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/et/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + 
"LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/et/pytorch_adapter.bin b/loras/ted2020-corrupted/et/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..6a872110ead9a19622c8b5db693b1dbcd7685e0a --- /dev/null +++ b/loras/ted2020-corrupted/et/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c199c15231832a414de30498b61cc8fa53ae137c0925a62ea4177ef0b0202494 +size 5333085 diff --git a/loras/ted2020-corrupted/et/pytorch_model_head.bin b/loras/ted2020-corrupted/et/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..f078b710504ca36544ca311cea4dcd5eb34e6a79 --- /dev/null +++ b/loras/ted2020-corrupted/et/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cfb9ab2c3b3cc774a0cbeaf23ef25b52433a8439f8f3c42a7edf4d2625c8da54 +size 342547 diff --git a/loras/ted2020-corrupted/eu/adapter_config.json b/loras/ted2020-corrupted/eu/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/eu/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/eu/head_config.json b/loras/ted2020-corrupted/eu/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- 
/dev/null +++ b/loras/ted2020-corrupted/eu/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/eu/pytorch_adapter.bin b/loras/ted2020-corrupted/eu/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..faaf6082bf49bd9d4a87ae7c1d38086c5049b488 --- /dev/null +++ b/loras/ted2020-corrupted/eu/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:52702b5770836c60c45651d9a12e923f441a9bca8c094d6e4bf64a54dceb5b18 +size 5333085 diff --git a/loras/ted2020-corrupted/eu/pytorch_model_head.bin b/loras/ted2020-corrupted/eu/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..aead70433f62e7ccec92e90113bd695759e135c5 --- /dev/null +++ b/loras/ted2020-corrupted/eu/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e8267041cab9b1bc6cf0f6651d9fd3665d3349ec6d629e950e1e7390b7f00e38 +size 342547 diff --git a/loras/ted2020-corrupted/fa/adapter_config.json b/loras/ted2020-corrupted/fa/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/fa/adapter_config.json @@ -0,0 +1,25 @@ +{ + 
"config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/fa/head_config.json b/loras/ted2020-corrupted/fa/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/fa/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/fa/pytorch_adapter.bin b/loras/ted2020-corrupted/fa/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..24622374caf053141c7af373a157e20e68e0578d --- /dev/null +++ b/loras/ted2020-corrupted/fa/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6883c40c259644c346596d7f980ae92de11878e0e36c30fc3ca72068e7969dae +size 5333085 diff --git 
a/loras/ted2020-corrupted/fa/pytorch_model_head.bin b/loras/ted2020-corrupted/fa/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..2f7c4bba188e500fe18c7b5b587b65db2088b2c8 --- /dev/null +++ b/loras/ted2020-corrupted/fa/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dc7848352db0b42c9711f4e68aec9b783bb0afb5e60308e0203ae030316044d8 +size 342547 diff --git a/loras/ted2020-corrupted/fi/adapter_config.json b/loras/ted2020-corrupted/fi/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/fi/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/fi/head_config.json b/loras/ted2020-corrupted/fi/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/fi/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + 
"LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/fi/pytorch_adapter.bin b/loras/ted2020-corrupted/fi/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..6d761743f0ac56cca5b066349f9825ab90cce825 --- /dev/null +++ b/loras/ted2020-corrupted/fi/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:744c6bb1411d5ec8089690b16ba97f9a42d4ac23a493124d6d2a68db51b7bb52 +size 5333085 diff --git a/loras/ted2020-corrupted/fi/pytorch_model_head.bin b/loras/ted2020-corrupted/fi/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..edf55f48aedd265f96844a6fe8c799ac026fb419 --- /dev/null +++ b/loras/ted2020-corrupted/fi/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cf382f57ca0ab3fc025c14b554ee56d7be2d4d3c5e2f79eeed548da16429c2b5 +size 342547 diff --git a/loras/ted2020-corrupted/fr/adapter_config.json b/loras/ted2020-corrupted/fr/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/fr/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/fr/head_config.json b/loras/ted2020-corrupted/fr/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/fr/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + 
"LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/fr/pytorch_adapter.bin b/loras/ted2020-corrupted/fr/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..8e1c5b55ba006d587d7628e54f500bb25ff73508 --- /dev/null +++ b/loras/ted2020-corrupted/fr/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:62e76c1cc726d0935c89a628fad9e2782b3834729eb7fbc26dee1e0f088115f7 +size 5333085 diff --git a/loras/ted2020-corrupted/fr/pytorch_model_head.bin b/loras/ted2020-corrupted/fr/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..997086113655169c6199f2da9e5caa7bf9f80fde --- /dev/null +++ b/loras/ted2020-corrupted/fr/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:29a673e1abdd4126414dafabfa09d8dd3bb32d1a2b7452806f5c7c9bd5709688 +size 342547 diff --git a/loras/ted2020-corrupted/ga/adapter_config.json b/loras/ted2020-corrupted/ga/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/ga/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ga/head_config.json b/loras/ted2020-corrupted/ga/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/ga/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + 
"LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ga/pytorch_adapter.bin b/loras/ted2020-corrupted/ga/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..00e5cb552812915ac5fee0a401190c9bae2c1d7c --- /dev/null +++ b/loras/ted2020-corrupted/ga/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8a6db77cd7ed3a208ee8e8933cdccc9579575918ba15684493c711f023ae2d80 +size 5333085 diff --git a/loras/ted2020-corrupted/ga/pytorch_model_head.bin b/loras/ted2020-corrupted/ga/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..da4b8e85ef5efbaef7e53f232330c34f886d4bd9 --- /dev/null +++ b/loras/ted2020-corrupted/ga/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7b52b9aec9b74aefe4f3ae4381cca1eae660ba6f895d3542f7fe05c79bba2ddf +size 342547 diff --git a/loras/ted2020-corrupted/gl/adapter_config.json b/loras/ted2020-corrupted/gl/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/gl/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/gl/head_config.json 
b/loras/ted2020-corrupted/gl/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/gl/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/gl/pytorch_adapter.bin b/loras/ted2020-corrupted/gl/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..3321b0652eca04da0ce7ed189822d34585475beb --- /dev/null +++ b/loras/ted2020-corrupted/gl/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9044bde406ce0c0d5f03238400d25c1d846fa3b15755924152ac0869303dbad6 +size 5333085 diff --git a/loras/ted2020-corrupted/gl/pytorch_model_head.bin b/loras/ted2020-corrupted/gl/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..fe6424e3a3a8dd82931f8191923d699fa78d027c --- /dev/null +++ b/loras/ted2020-corrupted/gl/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a8913914fa0b60ee55fae4c89b422caf44ff866bf378a433afa18f40d7361715 +size 342547 diff --git a/loras/ted2020-corrupted/gu/adapter_config.json b/loras/ted2020-corrupted/gu/adapter_config.json new file mode 100644 index 
0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/gu/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/gu/head_config.json b/loras/ted2020-corrupted/gu/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/gu/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/gu/pytorch_adapter.bin b/loras/ted2020-corrupted/gu/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..526132da0d571163691d833c3661cd38e42ba3ca --- /dev/null +++ b/loras/ted2020-corrupted/gu/pytorch_adapter.bin @@ 
-0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fe3bd87f4864ddbd4113884bfc081ebf364230c86bb0c0d78ebeadfab5e961d0 +size 5333085 diff --git a/loras/ted2020-corrupted/gu/pytorch_model_head.bin b/loras/ted2020-corrupted/gu/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..885c5bb221d4209335d1c95e4e491f9fe2aaa284 --- /dev/null +++ b/loras/ted2020-corrupted/gu/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d21d2e9e897ef50dc79cc3dd9306c564bf68d6791b0b7d6f15a8e87ddff67371 +size 342547 diff --git a/loras/ted2020-corrupted/ha/adapter_config.json b/loras/ted2020-corrupted/ha/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/ha/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ha/head_config.json b/loras/ted2020-corrupted/ha/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/ha/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 
88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ha/pytorch_adapter.bin b/loras/ted2020-corrupted/ha/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..c80d9735c8367b5b526de129a00926be9e9dd5c3 --- /dev/null +++ b/loras/ted2020-corrupted/ha/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5ca70da9e92f0f8b0dad404166c6d4e21d14512efc7502926e05ec6666d35074 +size 5333085 diff --git a/loras/ted2020-corrupted/ha/pytorch_model_head.bin b/loras/ted2020-corrupted/ha/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..a8b4952f329937970cf04a2f673a73b95a53964c --- /dev/null +++ b/loras/ted2020-corrupted/ha/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ec2e4b1023900924afb9d654891cd58c8e80f30b6a749100cfbdf547dae7ab3d +size 342547 diff --git a/loras/ted2020-corrupted/he/adapter_config.json b/loras/ted2020-corrupted/he/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/he/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/he/head_config.json b/loras/ted2020-corrupted/he/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/he/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + 
"LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/he/pytorch_adapter.bin b/loras/ted2020-corrupted/he/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..7aea6b2b9226d4ede02c2cc1c13ce44dfd4c27fd --- /dev/null +++ b/loras/ted2020-corrupted/he/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c3b06a2211770445b02de2bb39bbd1784b240b364e096f339f8b56e2241a54b0 +size 5333085 diff --git a/loras/ted2020-corrupted/he/pytorch_model_head.bin b/loras/ted2020-corrupted/he/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..6e479ad934a7c014c181fae9971a90dcf1a05ce2 --- /dev/null +++ b/loras/ted2020-corrupted/he/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0506c5dae12a1fee47ee41e67e2ca332bc96fec625766dcacbe576304107910b +size 342547 diff --git a/loras/ted2020-corrupted/hi/adapter_config.json b/loras/ted2020-corrupted/hi/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/hi/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/hi/head_config.json b/loras/ted2020-corrupted/hi/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/hi/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + 
"LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/hi/pytorch_adapter.bin b/loras/ted2020-corrupted/hi/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..006f59018c2910839572456ef4f59554b6a82ace --- /dev/null +++ b/loras/ted2020-corrupted/hi/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1d06629e42fe4422d86f9d8aaf9d0a135f90cab4e927e3fee544b36dbe2cf353 +size 5333085 diff --git a/loras/ted2020-corrupted/hi/pytorch_model_head.bin b/loras/ted2020-corrupted/hi/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..74d927733500504f7e57d639d0aea8ec0a0563ff --- /dev/null +++ b/loras/ted2020-corrupted/hi/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f00ae4df7a6a2291cb713755b5e98d84a924b69b8917f36f2e03a8c231935998 +size 342547 diff --git a/loras/ted2020-corrupted/hu/adapter_config.json b/loras/ted2020-corrupted/hu/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/hu/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + 
"version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/hu/head_config.json b/loras/ted2020-corrupted/hu/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/hu/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/hu/pytorch_adapter.bin b/loras/ted2020-corrupted/hu/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..61b8a2af813e29c0438eab465eab249a8a3131b4 --- /dev/null +++ b/loras/ted2020-corrupted/hu/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3752191609c282941321fb09039451d58d69c7221571219d99792191d1ced5bf +size 5333085 diff --git a/loras/ted2020-corrupted/hu/pytorch_model_head.bin b/loras/ted2020-corrupted/hu/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..5b8eb557c18e8df0c3015c325346349cb2405450 --- /dev/null +++ b/loras/ted2020-corrupted/hu/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a9cb985b186e0521e11a7abf508b3bc0de52834040383194639a4c4b920166c5 +size 342547 diff --git 
a/loras/ted2020-corrupted/hy/adapter_config.json b/loras/ted2020-corrupted/hy/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/hy/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/hy/head_config.json b/loras/ted2020-corrupted/hy/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/hy/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/hy/pytorch_adapter.bin b/loras/ted2020-corrupted/hy/pytorch_adapter.bin new file mode 100644 index 
0000000000000000000000000000000000000000..0e5a6b646196a7da8e5b1399d813b7769747fe20 --- /dev/null +++ b/loras/ted2020-corrupted/hy/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:76a151dc66e6150be06b2159a14397a681f159758525b60c9fb01455e91d4ff5 +size 5333085 diff --git a/loras/ted2020-corrupted/hy/pytorch_model_head.bin b/loras/ted2020-corrupted/hy/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..415d8edc5b5345fa59f58dd0889d851f640173cf --- /dev/null +++ b/loras/ted2020-corrupted/hy/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:228db613d246460fe0caaa7f5739fd3da978259a860c6d514239b28a28148a29 +size 342547 diff --git a/loras/ted2020-corrupted/id/adapter_config.json b/loras/ted2020-corrupted/id/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/id/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/id/head_config.json b/loras/ted2020-corrupted/id/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/id/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + 
"LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/id/pytorch_adapter.bin b/loras/ted2020-corrupted/id/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..bd74fc6fac90a3d4bb7a8fa7ae885fdc8e4a11c1 --- /dev/null +++ b/loras/ted2020-corrupted/id/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:89d97958975744124e505e372e0664a1db0f0b8946a5ffe0eb23193709b35a58 +size 5333085 diff --git a/loras/ted2020-corrupted/id/pytorch_model_head.bin b/loras/ted2020-corrupted/id/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..3d9ecb499a994c40b80365fd1f7fa69227ef5538 --- /dev/null +++ b/loras/ted2020-corrupted/id/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2e41db3849876f7d6cca04f5d30922388df35df3ed845d776437d690699ff779 +size 342547 diff --git a/loras/ted2020-corrupted/ig/adapter_config.json b/loras/ted2020-corrupted/ig/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/ig/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ig/head_config.json b/loras/ted2020-corrupted/ig/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/ig/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + 
"LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ig/pytorch_adapter.bin b/loras/ted2020-corrupted/ig/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..6533da57389ab4e039eb2606a0eda3fd7bf98bad --- /dev/null +++ b/loras/ted2020-corrupted/ig/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ee118bae27bfa634bc8c2733027428902328a16dd5952f6640658da19402284d +size 5333085 diff --git a/loras/ted2020-corrupted/ig/pytorch_model_head.bin b/loras/ted2020-corrupted/ig/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..96f2a9953bc97b5db45f75d46d84fabf76421440 --- /dev/null +++ b/loras/ted2020-corrupted/ig/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e8b0eae3d381fed99ab71031bd9375687470e37174ddb919e07ee611999c7202 +size 342547 diff --git a/loras/ted2020-corrupted/is/adapter_config.json b/loras/ted2020-corrupted/is/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/is/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/is/head_config.json b/loras/ted2020-corrupted/is/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/is/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, 
+ "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/is/pytorch_adapter.bin b/loras/ted2020-corrupted/is/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..862ff90b8a45dcbe5ad1228dadca3d7de38c61c7 --- /dev/null +++ b/loras/ted2020-corrupted/is/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2e4cdabb55b03eb0fc527545f90be9d2245ec0e4fba684a6be534602603cd247 +size 5333085 diff --git a/loras/ted2020-corrupted/is/pytorch_model_head.bin b/loras/ted2020-corrupted/is/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..5981bd227615c5d41b0c122df8536ca36e1dc06a --- /dev/null +++ b/loras/ted2020-corrupted/is/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:48f783020b0204999ef078ce0ab09405fcdc86eca4563bf16f48aad53e4adf73 +size 342547 diff --git a/loras/ted2020-corrupted/it/adapter_config.json b/loras/ted2020-corrupted/it/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/it/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + 
"hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/it/head_config.json b/loras/ted2020-corrupted/it/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/it/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/it/pytorch_adapter.bin b/loras/ted2020-corrupted/it/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..d14e1c3836c66ff7569924b28787624401ba8b22 --- /dev/null +++ b/loras/ted2020-corrupted/it/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0050932804b3fff89417d1582c21249c5197a02fd6253faea192b75921f36d23 +size 5333085 diff --git a/loras/ted2020-corrupted/it/pytorch_model_head.bin b/loras/ted2020-corrupted/it/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..823a5ad212700e5a58122946ef02e0e70f9bf563 --- /dev/null +++ b/loras/ted2020-corrupted/it/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:2ae7477de16fb64f0a6dc21af7e93ac8f7c4476bd67e5237d09ea532084e162f +size 342547 diff --git a/loras/ted2020-corrupted/ja/adapter_config.json b/loras/ted2020-corrupted/ja/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/ja/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ja/head_config.json b/loras/ted2020-corrupted/ja/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/ja/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git 
a/loras/ted2020-corrupted/ja/pytorch_adapter.bin b/loras/ted2020-corrupted/ja/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..cf766d5d41c9832224abbbc91c725ff24dd0402a --- /dev/null +++ b/loras/ted2020-corrupted/ja/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1c684df9b06ea8595b62d525ffc5c03eff0ac95c8f7ab144d3941b376c620534 +size 5333085 diff --git a/loras/ted2020-corrupted/ja/pytorch_model_head.bin b/loras/ted2020-corrupted/ja/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..f93352dc3cbe9af64fe575790baea7e3963c2ec5 --- /dev/null +++ b/loras/ted2020-corrupted/ja/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2385190deb368ea38df08f2251efe2b54603a9954f51e0118ab70b6f922a6417 +size 342547 diff --git a/loras/ted2020-corrupted/ka/adapter_config.json b/loras/ted2020-corrupted/ka/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/ka/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ka/head_config.json b/loras/ted2020-corrupted/ka/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/ka/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + 
"LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ka/pytorch_adapter.bin b/loras/ted2020-corrupted/ka/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..1b670c5608957915421ee4f2989c866629ef70f3 --- /dev/null +++ b/loras/ted2020-corrupted/ka/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f98ecd7d69ed94bc93e58fb9b6642948e4b35a0f3ff5b08431fe7352470493f1 +size 5333085 diff --git a/loras/ted2020-corrupted/ka/pytorch_model_head.bin b/loras/ted2020-corrupted/ka/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..2ffcc7b91a6ff1f0753f91efa80688501bbeb902 --- /dev/null +++ b/loras/ted2020-corrupted/ka/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:84a9c39b4120527ec97b0a96878d49256dfe5c35990ee19fe74fc10a64e5e449 +size 342547 diff --git a/loras/ted2020-corrupted/kk/adapter_config.json b/loras/ted2020-corrupted/kk/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/kk/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/kk/head_config.json b/loras/ted2020-corrupted/kk/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/kk/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + 
"LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/kk/pytorch_adapter.bin b/loras/ted2020-corrupted/kk/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..d285316fa159e6140be72feb7dacab99081d0fb4 --- /dev/null +++ b/loras/ted2020-corrupted/kk/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1c817cd5995794c4438ba8aec11488f23c5117c1b2e732da8279362966d87dc4 +size 5333085 diff --git a/loras/ted2020-corrupted/kk/pytorch_model_head.bin b/loras/ted2020-corrupted/kk/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..d479fa3eeccbc279c1e0c9fe2053680e9e5519d3 --- /dev/null +++ b/loras/ted2020-corrupted/kk/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:60fe1f391682b2fcc1aa029188ddb7e0537fcfa9753fa9dd925c9eea7556dbbc +size 342547 diff --git a/loras/ted2020-corrupted/km/adapter_config.json b/loras/ted2020-corrupted/km/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/km/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/km/head_config.json b/loras/ted2020-corrupted/km/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/km/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 
10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/km/pytorch_adapter.bin b/loras/ted2020-corrupted/km/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..c8d9c154050ed13d5ba6014be22ef18fb7a1802b --- /dev/null +++ b/loras/ted2020-corrupted/km/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:208a866c026b4054ef42a03c6c80bb20edfc0c3c5af0784af2b08b21a674a815 +size 5333085 diff --git a/loras/ted2020-corrupted/km/pytorch_model_head.bin b/loras/ted2020-corrupted/km/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..166e8ce3aeee12f2eb75620fca81ad7d028781aa --- /dev/null +++ b/loras/ted2020-corrupted/km/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a8a1cff61103739a0dad6b43c82c76c8e07bf9b84112589849e77f3aec56465a +size 342547 diff --git a/loras/ted2020-corrupted/kn/adapter_config.json b/loras/ted2020-corrupted/kn/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/kn/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": 
true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/kn/head_config.json b/loras/ted2020-corrupted/kn/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/kn/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/kn/pytorch_adapter.bin b/loras/ted2020-corrupted/kn/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..4dfd360377c050aa974e5811b20566ffbf9a593c --- /dev/null +++ b/loras/ted2020-corrupted/kn/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4bde77d516c6f7d9a0ffa62053c5ff89c92f703da72758fa141911c0a8f20938 +size 5333085 diff --git a/loras/ted2020-corrupted/kn/pytorch_model_head.bin b/loras/ted2020-corrupted/kn/pytorch_model_head.bin new file mode 100644 index 
0000000000000000000000000000000000000000..35291efde85588db90afdc271f32bbad08f7de31 --- /dev/null +++ b/loras/ted2020-corrupted/kn/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:400b168ac4d7aefcf1ba7cf81b33d80e4b45c6144219c211145275857e5aa79d +size 342547 diff --git a/loras/ted2020-corrupted/ko/adapter_config.json b/loras/ted2020-corrupted/ko/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/ko/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ko/head_config.json b/loras/ted2020-corrupted/ko/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/ko/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + 
"model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ko/pytorch_adapter.bin b/loras/ted2020-corrupted/ko/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..dd83f5f9a1a940df1fe2f1ed6d0ba54548803c81 --- /dev/null +++ b/loras/ted2020-corrupted/ko/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0716450ed6639548ec3d5c4efdee767788c253d9666328ad9044faa292d53afb +size 5333085 diff --git a/loras/ted2020-corrupted/ko/pytorch_model_head.bin b/loras/ted2020-corrupted/ko/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..fe59ca999cfd085c4150ad44085f7c13be5136c0 --- /dev/null +++ b/loras/ted2020-corrupted/ko/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2380e1ce4e09fc195c8752bb740afaa4265902f7e651de2cb2c96ca6d8052c49 +size 342547 diff --git a/loras/ted2020-corrupted/ku/adapter_config.json b/loras/ted2020-corrupted/ku/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/ku/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ku/head_config.json b/loras/ted2020-corrupted/ku/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/ku/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + 
"LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ku/pytorch_adapter.bin b/loras/ted2020-corrupted/ku/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..9349d12ef5ca0b9bc941c087f1976cc6c302d92a --- /dev/null +++ b/loras/ted2020-corrupted/ku/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b5bcd49205e63e3f797def4cdeae8806048a6658ef6de29d566a153aabed90eb +size 5333085 diff --git a/loras/ted2020-corrupted/ku/pytorch_model_head.bin b/loras/ted2020-corrupted/ku/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..63836539453e39160c50ae189cae78abc895ed68 --- /dev/null +++ b/loras/ted2020-corrupted/ku/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9650d1589ed7b5ddcf21b49358658842e25e95a001443db9d0504c2188e47b53 +size 342547 diff --git a/loras/ted2020-corrupted/ky/adapter_config.json b/loras/ted2020-corrupted/ky/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/ky/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ky/head_config.json b/loras/ted2020-corrupted/ky/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/ky/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + 
"LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ky/pytorch_adapter.bin b/loras/ted2020-corrupted/ky/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..6105a8ee636141371ce7498f8ea56f15628ac230 --- /dev/null +++ b/loras/ted2020-corrupted/ky/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fcaa9e04b73dec6f5e29b2f02ba41738ef732b16b6ee46c5f879bb965803e7a8 +size 5333085 diff --git a/loras/ted2020-corrupted/ky/pytorch_model_head.bin b/loras/ted2020-corrupted/ky/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..ec415c688d0b01a46a922f86e41f163286c77b85 --- /dev/null +++ b/loras/ted2020-corrupted/ky/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9c42cd5df540ba1d5cf6f03ec5b0f1b4d97a54d59f2b99c2ecd6a4a86202722f +size 342547 diff --git a/loras/ted2020-corrupted/la/adapter_config.json b/loras/ted2020-corrupted/la/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/la/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/la/head_config.json b/loras/ted2020-corrupted/la/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- 
/dev/null +++ b/loras/ted2020-corrupted/la/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/la/pytorch_adapter.bin b/loras/ted2020-corrupted/la/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..cc8821a5f4b4fe70ad3d805908b1683f5305dc9e --- /dev/null +++ b/loras/ted2020-corrupted/la/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:18cf07a52509ffbeb5313577a7b040b0ac373eb6be5d8f0e3811cd567828ea3c +size 5333085 diff --git a/loras/ted2020-corrupted/la/pytorch_model_head.bin b/loras/ted2020-corrupted/la/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..e8b924dc4ed782ae9db633dfa72a4f0d60e2cd45 --- /dev/null +++ b/loras/ted2020-corrupted/la/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:793f59bec5202addb9d84751c5e75efbb6f7e2e661509fbf54c79e1158cc91b2 +size 342547 diff --git a/loras/ted2020-corrupted/lt/adapter_config.json b/loras/ted2020-corrupted/lt/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/lt/adapter_config.json @@ -0,0 +1,25 @@ +{ + 
"config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/lt/head_config.json b/loras/ted2020-corrupted/lt/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/lt/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/lt/pytorch_adapter.bin b/loras/ted2020-corrupted/lt/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..f207c43f21050900709fbcda5bc8117e5de14db1 --- /dev/null +++ b/loras/ted2020-corrupted/lt/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:79476b5cb27b4eca9f9054fe58e8080a89f83d8a87425a5e759af33ab9dc3871 +size 5333085 diff --git 
a/loras/ted2020-corrupted/lt/pytorch_model_head.bin b/loras/ted2020-corrupted/lt/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..fbbb1a5d188185859d522a546a7749bca2670f25 --- /dev/null +++ b/loras/ted2020-corrupted/lt/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:30149dd9a26549a6bc3243038c246d3f26a692b543179ba05399b4aa0188273c +size 342547 diff --git a/loras/ted2020-corrupted/lv/adapter_config.json b/loras/ted2020-corrupted/lv/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/lv/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/lv/head_config.json b/loras/ted2020-corrupted/lv/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/lv/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + 
"LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/lv/pytorch_adapter.bin b/loras/ted2020-corrupted/lv/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..e65ce486657919afe57526d021b8bd7c4990d583 --- /dev/null +++ b/loras/ted2020-corrupted/lv/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:84289839c74b096a878891a4d8fc3f24d18521b468145c5af1b3f2a010cc63da +size 5333085 diff --git a/loras/ted2020-corrupted/lv/pytorch_model_head.bin b/loras/ted2020-corrupted/lv/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..6920d77a7f2ea07df8892664f0a90c3b83cc766c --- /dev/null +++ b/loras/ted2020-corrupted/lv/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:222b07db173092275293dc9d1afc81a7bc795d795d3346c3c069bf5dcde5945b +size 342547 diff --git a/loras/ted2020-corrupted/mg/adapter_config.json b/loras/ted2020-corrupted/mg/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/mg/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/mg/head_config.json b/loras/ted2020-corrupted/mg/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/mg/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + 
"LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/mg/pytorch_adapter.bin b/loras/ted2020-corrupted/mg/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..56d2717146bdce85c6177ac0d25cd900c26c6d81 --- /dev/null +++ b/loras/ted2020-corrupted/mg/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:abe84422d0d0ec0da4cbee4267ce03a4e9be2d9956934fb82dc8a44e5b1351d4 +size 5333085 diff --git a/loras/ted2020-corrupted/mg/pytorch_model_head.bin b/loras/ted2020-corrupted/mg/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..e21eda0c10e02ed30b321053687fe64b32addfd0 --- /dev/null +++ b/loras/ted2020-corrupted/mg/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e59a783fbb433a1894c658e61761e81ca36d26fef0a32f35f2023374758e266f +size 342547 diff --git a/loras/ted2020-corrupted/mk/adapter_config.json b/loras/ted2020-corrupted/mk/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/mk/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/mk/head_config.json b/loras/ted2020-corrupted/mk/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/mk/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + 
"LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/mk/pytorch_adapter.bin b/loras/ted2020-corrupted/mk/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..764a8070f3c38d8ebcdcd5108d20beac95f88670 --- /dev/null +++ b/loras/ted2020-corrupted/mk/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c761f26db4a2aeb76d8ab552900d948cfd435cd7e9dd347098d36bf872401311 +size 5333085 diff --git a/loras/ted2020-corrupted/mk/pytorch_model_head.bin b/loras/ted2020-corrupted/mk/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..f3a9aa210a72a25948d56128708ae76dd8a3e4c7 --- /dev/null +++ b/loras/ted2020-corrupted/mk/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:df9ca79f46b938b27ccb15c8ba47772b25e84fbd43e9c6ae347b7489324fa36d +size 342547 diff --git a/loras/ted2020-corrupted/ml/adapter_config.json b/loras/ted2020-corrupted/ml/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/ml/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ml/head_config.json 
b/loras/ted2020-corrupted/ml/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/ml/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ml/pytorch_adapter.bin b/loras/ted2020-corrupted/ml/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..477c0761eeea9a2f0f3aec7cf870b4d96bfaff2f --- /dev/null +++ b/loras/ted2020-corrupted/ml/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3ed3e2c0f7039265fb6c1f1a63f96658c7765df4f3a3ffd9f2437cac41e2a895 +size 5333085 diff --git a/loras/ted2020-corrupted/ml/pytorch_model_head.bin b/loras/ted2020-corrupted/ml/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..58710f39d3aa70e6a0d89668b3a491042557b70b --- /dev/null +++ b/loras/ted2020-corrupted/ml/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:33797b90589213fa665d9710a21a1055c25761378ba44939baae75b7f7296057 +size 342547 diff --git a/loras/ted2020-corrupted/mn/adapter_config.json b/loras/ted2020-corrupted/mn/adapter_config.json new file mode 100644 index 
0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/mn/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/mn/head_config.json b/loras/ted2020-corrupted/mn/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/mn/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/mn/pytorch_adapter.bin b/loras/ted2020-corrupted/mn/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..1a8085b10c5e8c20c615377f4b07f6bcca1dfee5 --- /dev/null +++ b/loras/ted2020-corrupted/mn/pytorch_adapter.bin @@ 
-0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fd41e697afeaea3f43461564a8cd6ab3b74cbb9ff5135071ef85aa8c00f97900 +size 5333085 diff --git a/loras/ted2020-corrupted/mn/pytorch_model_head.bin b/loras/ted2020-corrupted/mn/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..2f826ea7f902d6ab3a07f0d0427ee574800aa380 --- /dev/null +++ b/loras/ted2020-corrupted/mn/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bce52d5bf1e105b307b088886ae4a8ad880b9a117eff265a258a57e39f0d8275 +size 342547 diff --git a/loras/ted2020-corrupted/mr/adapter_config.json b/loras/ted2020-corrupted/mr/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/mr/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/mr/head_config.json b/loras/ted2020-corrupted/mr/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/mr/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 
88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/mr/pytorch_adapter.bin b/loras/ted2020-corrupted/mr/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..ed040e6607884cdca4e85db56c69ffb2b92299d9 --- /dev/null +++ b/loras/ted2020-corrupted/mr/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:907f4700b287515b6149fbb91ec0cea4125df4d51a4b1a1637a7c1c4500260fb +size 5333085 diff --git a/loras/ted2020-corrupted/mr/pytorch_model_head.bin b/loras/ted2020-corrupted/mr/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..33355c7a87a099d7e035b896962c259014535d35 --- /dev/null +++ b/loras/ted2020-corrupted/mr/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d1fcdf217ebf22add4590e5c6a3c4c37e0d7984a97c0ef1a9fcffdcdda261c72 +size 342547 diff --git a/loras/ted2020-corrupted/ms/adapter_config.json b/loras/ted2020-corrupted/ms/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/ms/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ms/head_config.json b/loras/ted2020-corrupted/ms/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/ms/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + 
"LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ms/pytorch_adapter.bin b/loras/ted2020-corrupted/ms/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..d29145cae0715edc919360f37de0e031fb52acd9 --- /dev/null +++ b/loras/ted2020-corrupted/ms/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:44529a6772e85ef7734363d836166b356820677dc301e1ac8c8d3a8701c25c12 +size 5333085 diff --git a/loras/ted2020-corrupted/ms/pytorch_model_head.bin b/loras/ted2020-corrupted/ms/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..d5c96ac914f693e136dcc79ece3fea763db45dcf --- /dev/null +++ b/loras/ted2020-corrupted/ms/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:14f63a07e573aff0004b2f67728d4fc401c0cf396eb31e5e9a8f7286b84d0e0e +size 342547 diff --git a/loras/ted2020-corrupted/mt/adapter_config.json b/loras/ted2020-corrupted/mt/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/mt/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/mt/head_config.json b/loras/ted2020-corrupted/mt/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/mt/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + 
"LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/mt/pytorch_adapter.bin b/loras/ted2020-corrupted/mt/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..d36b264ef1bef07790369aa3768e8617d824739d --- /dev/null +++ b/loras/ted2020-corrupted/mt/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1da3d73b98df1afe9b9524965b1f4bad1798404fcfd62dae3ee52a4f301a0a4d +size 5333085 diff --git a/loras/ted2020-corrupted/mt/pytorch_model_head.bin b/loras/ted2020-corrupted/mt/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..da3d0755349cc902583f64ddb26c5faf1f498d90 --- /dev/null +++ b/loras/ted2020-corrupted/mt/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1feea5d1fa1d7f37ead10e584f54cdf29cb9d11ac575c939cdc330995ad1657c +size 342547 diff --git a/loras/ted2020-corrupted/my/adapter_config.json b/loras/ted2020-corrupted/my/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/my/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + 
"version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/my/head_config.json b/loras/ted2020-corrupted/my/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/my/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/my/pytorch_adapter.bin b/loras/ted2020-corrupted/my/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..b21759d8235af3d5959cd4b220bad631fcb6718d --- /dev/null +++ b/loras/ted2020-corrupted/my/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aa5d057d76f38a6e1232f0697ad46feebe8c280b78af28377ee520e557a5141c +size 5333085 diff --git a/loras/ted2020-corrupted/my/pytorch_model_head.bin b/loras/ted2020-corrupted/my/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..f7d03c6e9f11a091fbc79247735375eb39742229 --- /dev/null +++ b/loras/ted2020-corrupted/my/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:26c51e3ccec48679ce9f6ac09e56fe26df9b0b07600795970fd2b0a56fdfe69b +size 342547 diff --git 
a/loras/ted2020-corrupted/ne/adapter_config.json b/loras/ted2020-corrupted/ne/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/ne/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ne/head_config.json b/loras/ted2020-corrupted/ne/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/ne/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ne/pytorch_adapter.bin b/loras/ted2020-corrupted/ne/pytorch_adapter.bin new file mode 100644 index 
0000000000000000000000000000000000000000..039cd11faf1c81b0cb30f355a2571f629ed15fd4 --- /dev/null +++ b/loras/ted2020-corrupted/ne/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:87cf9933963d127da57a76b7a6bceea3e114b9120a90ca935d819c7c8709252c +size 5333085 diff --git a/loras/ted2020-corrupted/ne/pytorch_model_head.bin b/loras/ted2020-corrupted/ne/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..69804da589f155b34e2ce2568e964a1078e0fa4f --- /dev/null +++ b/loras/ted2020-corrupted/ne/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b5b6daa3b5468b2bffdbf9952d43dd6fefe9fa005d748a5f6f8baf016ee4af4a +size 342547 diff --git a/loras/ted2020-corrupted/nl/adapter_config.json b/loras/ted2020-corrupted/nl/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/nl/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/nl/head_config.json b/loras/ted2020-corrupted/nl/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/nl/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + 
"LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/nl/pytorch_adapter.bin b/loras/ted2020-corrupted/nl/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..28fa021a172383cb90d23d27dc6c3417891ef3f8 --- /dev/null +++ b/loras/ted2020-corrupted/nl/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5853b4c96806f46622dbe50d56f9f9dfd4c2d01e34e9ad4687f9a51fd08de016 +size 5333085 diff --git a/loras/ted2020-corrupted/nl/pytorch_model_head.bin b/loras/ted2020-corrupted/nl/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..abde9b8912bedc80af90db35de6fd846c2f160fe --- /dev/null +++ b/loras/ted2020-corrupted/nl/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:55676ab98ee5f32274377e9b59e3f994db9602cf1cb5510db576b231270aca4d +size 342547 diff --git a/loras/ted2020-corrupted/pa/adapter_config.json b/loras/ted2020-corrupted/pa/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/pa/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/pa/head_config.json b/loras/ted2020-corrupted/pa/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/pa/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + 
"LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/pa/pytorch_adapter.bin b/loras/ted2020-corrupted/pa/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..f70fe0fc762411b9ae7ac40a0ecec36a831d1d99 --- /dev/null +++ b/loras/ted2020-corrupted/pa/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c9fc3a96cee9164bde0eb05af2a26d0f7f2768682a5d3921bc827e58df9bca9b +size 5333085 diff --git a/loras/ted2020-corrupted/pa/pytorch_model_head.bin b/loras/ted2020-corrupted/pa/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..f3db34ced947f0d3213490c3d79870f297327a75 --- /dev/null +++ b/loras/ted2020-corrupted/pa/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e5df58da01c5cc899006c8867accce375436210d868d740382cb36359fca8f58 +size 342547 diff --git a/loras/ted2020-corrupted/pl/adapter_config.json b/loras/ted2020-corrupted/pl/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/pl/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/pl/head_config.json b/loras/ted2020-corrupted/pl/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/pl/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, 
+ "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/pl/pytorch_adapter.bin b/loras/ted2020-corrupted/pl/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..8294ab01307e284fd04286698afe4e4b1c23f0f7 --- /dev/null +++ b/loras/ted2020-corrupted/pl/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:647c41fa574588ba7ff2d8936fd254a9206169bfaa3d115453d11573906651f2 +size 5333085 diff --git a/loras/ted2020-corrupted/pl/pytorch_model_head.bin b/loras/ted2020-corrupted/pl/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..858dc98d90efc81a1a2382c61ca970da73812d63 --- /dev/null +++ b/loras/ted2020-corrupted/pl/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:cc5f7921275cffc4ecc62f216f21bc390bf855ca9d3945833c7d75c1e0ad7c88 +size 342547 diff --git a/loras/ted2020-corrupted/ps/adapter_config.json b/loras/ted2020-corrupted/ps/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/ps/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + 
"hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ps/head_config.json b/loras/ted2020-corrupted/ps/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/ps/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ps/pytorch_adapter.bin b/loras/ted2020-corrupted/ps/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..386dcb769831d2daf3990e1fbd475766f5a4cf27 --- /dev/null +++ b/loras/ted2020-corrupted/ps/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:64cb9f830d38082b8fa7b72728cc78a5c705d0cea30f06886f608d3a23acc6da +size 5333085 diff --git a/loras/ted2020-corrupted/ps/pytorch_model_head.bin b/loras/ted2020-corrupted/ps/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..783ea86d3cd9bd7dc26e122ba0693387f61944ec --- /dev/null +++ b/loras/ted2020-corrupted/ps/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:d81f7cc7576313e0b4546a7968bddeba65bd87b38d1fd0ada76b8a890cd35814 +size 342547 diff --git a/loras/ted2020-corrupted/pt/adapter_config.json b/loras/ted2020-corrupted/pt/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/pt/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/pt/head_config.json b/loras/ted2020-corrupted/pt/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/pt/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git 
a/loras/ted2020-corrupted/pt/pytorch_adapter.bin b/loras/ted2020-corrupted/pt/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..925154dd9b5f1251700d83838c9a2bb8b5a2f3d9 --- /dev/null +++ b/loras/ted2020-corrupted/pt/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c5fbc6bbc7249a9726e63956c43778abc08ad8120aa3b41418d7fffcf689180c +size 5333085 diff --git a/loras/ted2020-corrupted/pt/pytorch_model_head.bin b/loras/ted2020-corrupted/pt/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..4ddf92d684a5c49ad7a0879eca8fea3c5a6a450b --- /dev/null +++ b/loras/ted2020-corrupted/pt/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d3bc5b43358506bd36f0d26f1d981291e0b3f8528db35764b16cfb17be0ab223 +size 342547 diff --git a/loras/ted2020-corrupted/ro/adapter_config.json b/loras/ted2020-corrupted/ro/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/ro/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ro/head_config.json b/loras/ted2020-corrupted/ro/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/ro/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + 
"LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ro/pytorch_adapter.bin b/loras/ted2020-corrupted/ro/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..b0b9d70a8b4caebbf176952fd72dc48106a605c8 --- /dev/null +++ b/loras/ted2020-corrupted/ro/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3150f09d3397c19bc1fed2626039c3c867ac7142962d2cd2ad284e02da228a95 +size 5333085 diff --git a/loras/ted2020-corrupted/ro/pytorch_model_head.bin b/loras/ted2020-corrupted/ro/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..ec4d34b85bc7a3a29539365c6c6cf9c6f9e51c6e --- /dev/null +++ b/loras/ted2020-corrupted/ro/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8ffec64173445e5249003106d93ef482237e55828c7a222a1610242404303829 +size 342547 diff --git a/loras/ted2020-corrupted/ru/adapter_config.json b/loras/ted2020-corrupted/ru/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/ru/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ru/head_config.json b/loras/ted2020-corrupted/ru/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/ru/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + 
"LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ru/pytorch_adapter.bin b/loras/ted2020-corrupted/ru/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..3fcbce36d19ebf2ef7d2ac69abbaddd8f06d50f1 --- /dev/null +++ b/loras/ted2020-corrupted/ru/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f69852c0fadac1031dca6cc4a6c549ad90ebf134d22e752ce24c3c53c8eaaca9 +size 5333085 diff --git a/loras/ted2020-corrupted/ru/pytorch_model_head.bin b/loras/ted2020-corrupted/ru/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..46c86d0f64a2e658483373f415f8dde85fcdb254 --- /dev/null +++ b/loras/ted2020-corrupted/ru/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5207a205b5063b1da66c158801150f74e35108745ad59156625bb41f2e873034 +size 342547 diff --git a/loras/ted2020-corrupted/si/adapter_config.json b/loras/ted2020-corrupted/si/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/si/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/si/head_config.json b/loras/ted2020-corrupted/si/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/si/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 
10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/si/pytorch_adapter.bin b/loras/ted2020-corrupted/si/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..c60bb76b85f7de1d312ffd611a776d748ea8be97 --- /dev/null +++ b/loras/ted2020-corrupted/si/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6dffc0e3b268e646288e6857ac7740135529882dc1b16ea07240e53eeae8bf25 +size 5333085 diff --git a/loras/ted2020-corrupted/si/pytorch_model_head.bin b/loras/ted2020-corrupted/si/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..97c2bfbcfafe7546af66f46d9b89361a2e639412 --- /dev/null +++ b/loras/ted2020-corrupted/si/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:19593f3d11d3ffe8c9f079f3147d538889ca8f46cd766bc63946035059a4936b +size 342547 diff --git a/loras/ted2020-corrupted/sk/adapter_config.json b/loras/ted2020-corrupted/sk/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/sk/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": 
true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/sk/head_config.json b/loras/ted2020-corrupted/sk/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/sk/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/sk/pytorch_adapter.bin b/loras/ted2020-corrupted/sk/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..776d9a3848029350ca48c210d55fd757877a46c7 --- /dev/null +++ b/loras/ted2020-corrupted/sk/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f7cf11d4c9f88267f48bfa68c5423c000900d7fae4667f85ce87515e794f8aeb +size 5333085 diff --git a/loras/ted2020-corrupted/sk/pytorch_model_head.bin b/loras/ted2020-corrupted/sk/pytorch_model_head.bin new file mode 100644 index 
0000000000000000000000000000000000000000..0d10fc2cea37f6d62fd1f416f0d04a8c0f61bb1c --- /dev/null +++ b/loras/ted2020-corrupted/sk/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d728f7394e859b4558447d7349b87aedd47fddcd85b1a0f9f7c337199dba8915 +size 342547 diff --git a/loras/ted2020-corrupted/sl/adapter_config.json b/loras/ted2020-corrupted/sl/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/sl/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/sl/head_config.json b/loras/ted2020-corrupted/sl/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/sl/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + 
"model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/sl/pytorch_adapter.bin b/loras/ted2020-corrupted/sl/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..bb6c3a486abb98bb5447fb99cc14e77a4de3d6c6 --- /dev/null +++ b/loras/ted2020-corrupted/sl/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1042c2124bc4eaed7155c003e9df0c01fdfa06de850c77352b2ddd380075edaf +size 5333085 diff --git a/loras/ted2020-corrupted/sl/pytorch_model_head.bin b/loras/ted2020-corrupted/sl/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..5b1e8314b8fc2737f11bd8cda4669cad9583e537 --- /dev/null +++ b/loras/ted2020-corrupted/sl/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bed839984fc6f5d70ee58d39f8af6030ca5974d30077a5e37f16371b1f36a093 +size 342547 diff --git a/loras/ted2020-corrupted/sq/adapter_config.json b/loras/ted2020-corrupted/sq/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/sq/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/sq/head_config.json b/loras/ted2020-corrupted/sq/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/sq/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + 
"LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/sq/pytorch_adapter.bin b/loras/ted2020-corrupted/sq/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..4dbabd7d5d0de3b0a2741f19a71880200a4e4ca7 --- /dev/null +++ b/loras/ted2020-corrupted/sq/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1a23e73fdddec766f72c8b91a141fbdf5ca4c5fc20ee2b6abf6d74c636d974f2 +size 5333085 diff --git a/loras/ted2020-corrupted/sq/pytorch_model_head.bin b/loras/ted2020-corrupted/sq/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..a438ab6c82bfbf1ad7cf4c015eda563beb6649e2 --- /dev/null +++ b/loras/ted2020-corrupted/sq/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8f32b99fa03b9f60a52b0a75ee7a5485923f7c7aabb2a476f8a7a0064d985c7b +size 342547 diff --git a/loras/ted2020-corrupted/sr/adapter_config.json b/loras/ted2020-corrupted/sr/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/sr/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/sr/head_config.json b/loras/ted2020-corrupted/sr/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/sr/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + 
"LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/sr/pytorch_adapter.bin b/loras/ted2020-corrupted/sr/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..c84a9c66bc44ebfffb24f41a9ce6cf7c69058f71 --- /dev/null +++ b/loras/ted2020-corrupted/sr/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fcb098b6668febb3068dcdeee646ec3c6c0740d6e223eb53a00b8a1f722d970a +size 5333085 diff --git a/loras/ted2020-corrupted/sr/pytorch_model_head.bin b/loras/ted2020-corrupted/sr/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..6d04905ec852e5afbdfe30b7bc8c131c4f731f0d --- /dev/null +++ b/loras/ted2020-corrupted/sr/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d5c5c6d521ee9fda2009f129fba0ac4f70b23b8219f9980e95ebdbfe120ae85d +size 342547 diff --git a/loras/ted2020-corrupted/sv/adapter_config.json b/loras/ted2020-corrupted/sv/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/sv/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/sv/head_config.json b/loras/ted2020-corrupted/sv/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- 
/dev/null +++ b/loras/ted2020-corrupted/sv/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/sv/pytorch_adapter.bin b/loras/ted2020-corrupted/sv/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..1921666e3728333205a40aeb3d8ae7e690d853e6 --- /dev/null +++ b/loras/ted2020-corrupted/sv/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1c905649d34d7c0b06cfe01100478f99963d8e51f81188fc4884ec10d24d230b +size 5333085 diff --git a/loras/ted2020-corrupted/sv/pytorch_model_head.bin b/loras/ted2020-corrupted/sv/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..f3755bd3d075f761ec334a6e69c4f77b6ebd3599 --- /dev/null +++ b/loras/ted2020-corrupted/sv/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8c912b423335e41d35b3f032fb5ce6adf05dfb3ecf37e646c2ec237a62630800 +size 342547 diff --git a/loras/ted2020-corrupted/ta/adapter_config.json b/loras/ted2020-corrupted/ta/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/ta/adapter_config.json @@ -0,0 +1,25 @@ +{ + 
"config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ta/head_config.json b/loras/ted2020-corrupted/ta/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/ta/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ta/pytorch_adapter.bin b/loras/ted2020-corrupted/ta/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..4dd2e803330c218ff5fa6d0f1336d8bd9a4dff27 --- /dev/null +++ b/loras/ted2020-corrupted/ta/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e940391cf921c7911eba98cd3b5ccb1656cbaa94ca4273bc092d947bad332c84 +size 5333085 diff --git 
a/loras/ted2020-corrupted/ta/pytorch_model_head.bin b/loras/ted2020-corrupted/ta/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..35395d8ca69646da40378dc1270cb562d8b10ba5 --- /dev/null +++ b/loras/ted2020-corrupted/ta/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1a8d31585035bd7db31f0aa5da56c003a5fb26ab12d55fe4e008a11ac76a9584 +size 342547 diff --git a/loras/ted2020-corrupted/te/adapter_config.json b/loras/ted2020-corrupted/te/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/te/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/te/head_config.json b/loras/ted2020-corrupted/te/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/te/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + 
"LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/te/pytorch_adapter.bin b/loras/ted2020-corrupted/te/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..0b0b1b72ccabc49a8ed0662bbc5ef8fb56ee22ef --- /dev/null +++ b/loras/ted2020-corrupted/te/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6b9dc3ef44a5f8dd7aa5f1809204c7a84ddb065ff87e12905ed2b50801858672 +size 5333085 diff --git a/loras/ted2020-corrupted/te/pytorch_model_head.bin b/loras/ted2020-corrupted/te/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..3be323857feecc5a0fb707dfe3cb7abdce587ca1 --- /dev/null +++ b/loras/ted2020-corrupted/te/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5eb4e726a008fd098ac22756850d1621b143e599821039e0aff6e4df089aa479 +size 342547 diff --git a/loras/ted2020-corrupted/tg/adapter_config.json b/loras/ted2020-corrupted/tg/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/tg/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/tg/head_config.json b/loras/ted2020-corrupted/tg/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/tg/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + 
"LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/tg/pytorch_adapter.bin b/loras/ted2020-corrupted/tg/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..fa291b3948147c91bcffab49ba71b2e9086afe59 --- /dev/null +++ b/loras/ted2020-corrupted/tg/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e205bdc9494af483e056d8b2b37aeb8cad89083a607e47c4eb8cfd37140184e8 +size 5333085 diff --git a/loras/ted2020-corrupted/tg/pytorch_model_head.bin b/loras/ted2020-corrupted/tg/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..5704860847eacf55faf94e9285538a2412e4054c --- /dev/null +++ b/loras/ted2020-corrupted/tg/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6b59811ffa0ef5202acfc9c02fa00d0861c463d2595f0b3db4926ec7e9d2c251 +size 342547 diff --git a/loras/ted2020-corrupted/th/adapter_config.json b/loras/ted2020-corrupted/th/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/th/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/th/head_config.json b/loras/ted2020-corrupted/th/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/th/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + 
"LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/th/pytorch_adapter.bin b/loras/ted2020-corrupted/th/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..c743f9bab09b33af4646a0c10b0078aaf1299a56 --- /dev/null +++ b/loras/ted2020-corrupted/th/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bec096beadb9d0f12870d73f27fabd2da01ee95b3950d00e0610cdf1456a8782 +size 5333085 diff --git a/loras/ted2020-corrupted/th/pytorch_model_head.bin b/loras/ted2020-corrupted/th/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..eef9aff826f1f941a38d9dc4a44743cd52f66700 --- /dev/null +++ b/loras/ted2020-corrupted/th/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:7b498bcd1cc71613606ecbba8249c1bfa8f52f139be6046302707cd73a847a49 +size 342547 diff --git a/loras/ted2020-corrupted/tr/adapter_config.json b/loras/ted2020-corrupted/tr/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/tr/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/tr/head_config.json 
b/loras/ted2020-corrupted/tr/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/tr/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/tr/pytorch_adapter.bin b/loras/ted2020-corrupted/tr/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..5ca2f915b853636ad01952120c0f2fa2b1d7064d --- /dev/null +++ b/loras/ted2020-corrupted/tr/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:733e64b2a223a5d9948a46334a1c58bfc363bde23167b6a883255675d03f8d15 +size 5333085 diff --git a/loras/ted2020-corrupted/tr/pytorch_model_head.bin b/loras/ted2020-corrupted/tr/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..1e41f9563da307cfac264295ea322342c703aa21 --- /dev/null +++ b/loras/ted2020-corrupted/tr/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:73e6b95744161fccb279f9c5391e321000e06168ccadd1c7e5f122fdce38e93c +size 342547 diff --git a/loras/ted2020-corrupted/uk/adapter_config.json b/loras/ted2020-corrupted/uk/adapter_config.json new file mode 100644 index 
0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/uk/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/uk/head_config.json b/loras/ted2020-corrupted/uk/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/uk/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/uk/pytorch_adapter.bin b/loras/ted2020-corrupted/uk/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..41e724184e296570ec9457bfa9c04ceef80b99ed --- /dev/null +++ b/loras/ted2020-corrupted/uk/pytorch_adapter.bin @@ 
-0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d0754e3e2b73e5c31985950801e97afb33517145701f61c3dafd3848651f17cb +size 5333085 diff --git a/loras/ted2020-corrupted/uk/pytorch_model_head.bin b/loras/ted2020-corrupted/uk/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..8894e66d196bb744a999955fd8494211a47892e1 --- /dev/null +++ b/loras/ted2020-corrupted/uk/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f1001758bab7fae15910dcb049adf4a3a14a84e3cfd184fc4c37449ac5e24052 +size 342547 diff --git a/loras/ted2020-corrupted/ur/adapter_config.json b/loras/ted2020-corrupted/ur/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/ur/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ur/head_config.json b/loras/ted2020-corrupted/ur/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/ur/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 
88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/ur/pytorch_adapter.bin b/loras/ted2020-corrupted/ur/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..49c798eb1121495761c83c60efcfc07307a52619 --- /dev/null +++ b/loras/ted2020-corrupted/ur/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:52d26175adb2c1b3877d5de6ab4f0941f98a9763d6266f32032ee3dc28fff5e3 +size 5333085 diff --git a/loras/ted2020-corrupted/ur/pytorch_model_head.bin b/loras/ted2020-corrupted/ur/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..12123a2e7e016067a0dbc6af759985dc19b19723 --- /dev/null +++ b/loras/ted2020-corrupted/ur/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3e7ebac7b2be1bac9d6dec634a30f68614dba0543e71b92f1c5a54a548770ae5 +size 342547 diff --git a/loras/ted2020-corrupted/uz/adapter_config.json b/loras/ted2020-corrupted/uz/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/uz/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/uz/head_config.json b/loras/ted2020-corrupted/uz/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/uz/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + 
"LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/uz/pytorch_adapter.bin b/loras/ted2020-corrupted/uz/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..da3515597491ab10d4faac201dd658c74950c5a3 --- /dev/null +++ b/loras/ted2020-corrupted/uz/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f5bab211784acf7b4b80b189c8b1ef9b7a5081ab5a0617281fd122e4dedbf7ff +size 5333085 diff --git a/loras/ted2020-corrupted/uz/pytorch_model_head.bin b/loras/ted2020-corrupted/uz/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..0edda862808cd631dbf0ccd3c07e3c53f4abf824 --- /dev/null +++ b/loras/ted2020-corrupted/uz/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:75b05d8494f1468538235576a0683983e38b214a098fb086d3edbe3e1f8f4c54 +size 342547 diff --git a/loras/ted2020-corrupted/vi/adapter_config.json b/loras/ted2020-corrupted/vi/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/vi/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/vi/head_config.json b/loras/ted2020-corrupted/vi/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/vi/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + 
"LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/vi/pytorch_adapter.bin b/loras/ted2020-corrupted/vi/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..be5dce923795b3e2aa63c8bb15985a8c4afff823 --- /dev/null +++ b/loras/ted2020-corrupted/vi/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ac89cf5c1fe7704166205640411fcd9e9ec137c6547836628091e23a8754349c +size 5333085 diff --git a/loras/ted2020-corrupted/vi/pytorch_model_head.bin b/loras/ted2020-corrupted/vi/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..dc294320ba77feccdf5dca9a03a5a8ec33af6eb7 --- /dev/null +++ b/loras/ted2020-corrupted/vi/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:834b4ab55e43ff06baa7b42d2e06ab954bc4e0fc661eb1bfa86620f30a39f8f2 +size 342547 diff --git a/loras/ted2020-corrupted/zh/adapter_config.json b/loras/ted2020-corrupted/zh/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ted2020-corrupted/zh/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + 
"version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/zh/head_config.json b/loras/ted2020-corrupted/zh/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ted2020-corrupted/zh/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ted2020-corrupted/zh/pytorch_adapter.bin b/loras/ted2020-corrupted/zh/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..4e386bb08e2d98d721acdba03915dbcb30f290ed --- /dev/null +++ b/loras/ted2020-corrupted/zh/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2d9be904d82b8ed1776fddbdb0373839808cac3d47a1d590702db061e6317275 +size 5333085 diff --git a/loras/ted2020-corrupted/zh/pytorch_model_head.bin b/loras/ted2020-corrupted/zh/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..0a7cc3cac94326f0db4f0f7d691dae2cd00d618f --- /dev/null +++ b/loras/ted2020-corrupted/zh/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:81f9ec34cb64c68d265093789ac66624d5e8eff31b39edeb9b27722b581543ef +size 342547 diff --git a/loras/tweets/et/adapter_config.json 
b/loras/tweets/et/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/tweets/et/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/tweets/et/head_config.json b/loras/tweets/et/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/tweets/et/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/tweets/et/pytorch_adapter.bin b/loras/tweets/et/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..ea81bbe97ba2502e874e0bfcade5b79d4f7423af --- /dev/null +++ b/loras/tweets/et/pytorch_adapter.bin @@ -0,0 +1,3 @@ 
+version https://git-lfs.github.com/spec/v1 +oid sha256:82a15b770d8dcd298b59ef465ea30ae6d84ef702ff71ccd4a40c8c9e1074b7a8 +size 5333085 diff --git a/loras/tweets/et/pytorch_model_head.bin b/loras/tweets/et/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..4060b9f615849445349b5a2f40b86c7e93173aa6 --- /dev/null +++ b/loras/tweets/et/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f50733b697399ff28d9d1443596249bceaa5e37d388cf5b04cda01fce0818236 +size 342547 diff --git a/loras/tweets/sl/adapter_config.json b/loras/tweets/sl/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/tweets/sl/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/tweets/sl/head_config.json b/loras/tweets/sl/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/tweets/sl/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + 
"LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/tweets/sl/pytorch_adapter.bin b/loras/tweets/sl/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..3ada663f8b7814f624c4b301c821bed1833e3aa9 --- /dev/null +++ b/loras/tweets/sl/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dce511a9f3f224612dcc64daa6d059447159086d62ee0d3d5807ac359b7d86a0 +size 5333085 diff --git a/loras/tweets/sl/pytorch_model_head.bin b/loras/tweets/sl/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..5dbaba2579f87561542bbbee321c94a091deb47c --- /dev/null +++ b/loras/tweets/sl/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4020c64681e1b1fd2dbcb522051f0d15a59cbe7f37d2194bbb2161837bbd86a4 +size 342547 diff --git a/loras/tweets/sr/adapter_config.json b/loras/tweets/sr/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/tweets/sr/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/tweets/sr/head_config.json b/loras/tweets/sr/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/tweets/sr/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 
63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/tweets/sr/pytorch_adapter.bin b/loras/tweets/sr/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..8dfa63714e4d41974c86a638a0fb8b3be8516492 --- /dev/null +++ b/loras/tweets/sr/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3232c897ea842bd0aa4db3fa57fef4e793b6a966ba10c39027e341b06b9b9ed7 +size 5333085 diff --git a/loras/tweets/sr/pytorch_model_head.bin b/loras/tweets/sr/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..428efc55e7da7219ea8cf7196f683a3ec9427cf5 --- /dev/null +++ b/loras/tweets/sr/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6d46398a6a8a7536c99fad15cc4171786fc183fc49dc5fba7d1aed0e3016e621 +size 342547 diff --git a/loras/ud/af/adapter_config.json b/loras/ud/af/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/af/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/af/head_config.json b/loras/ud/af/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/af/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 
34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/af/pytorch_adapter.bin b/loras/ud/af/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..1ff2142455c066d8f21e600e820ec3efe826a499 --- /dev/null +++ b/loras/ud/af/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:413a6dee0a85d545939f12dbb30652731d95f129cc3ed187ab77170eb14304f8 +size 5333085 diff --git a/loras/ud/af/pytorch_model_head.bin b/loras/ud/af/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..f49fa7259743011f42670852c64cb0a13bea8dcc --- /dev/null +++ b/loras/ud/af/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:752ca200de1b6b5f3d769cc2cb0487083739536fb49cd8c539e103d925ea694a +size 342547 diff --git a/loras/ud/ar/adapter_config.json b/loras/ud/ar/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/ar/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/ar/head_config.json b/loras/ud/ar/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/ar/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + 
"LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/ar/pytorch_adapter.bin b/loras/ud/ar/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..87b9cfdd78ac07fcc3366fa9884f9e3c19b10c24 --- /dev/null +++ b/loras/ud/ar/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f0533c4202addd165f27ba0093a957bc0b6560ef0fee8dc080bfaa173da434df +size 5333085 diff --git a/loras/ud/ar/pytorch_model_head.bin b/loras/ud/ar/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..d2fc72ef97a3714dd67f406242762f4e971b354a --- /dev/null +++ b/loras/ud/ar/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:70e6ec2a5d47ffec632af5b7febb0dd451b09c91025a0014e35d04057c6b6b92 +size 342547 diff --git a/loras/ud/be/adapter_config.json b/loras/ud/be/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/be/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + 
"name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/be/head_config.json b/loras/ud/be/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/be/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/be/pytorch_adapter.bin b/loras/ud/be/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..2e18caa0edc8fdf53007b31d7bd1cd65b8caa145 --- /dev/null +++ b/loras/ud/be/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:42f6cebb4ec72fda7d3de5b4c739093f22af21561c57b6802b86e341600cab79 +size 5333085 diff --git a/loras/ud/be/pytorch_model_head.bin b/loras/ud/be/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..e95b38525eb49a10923e161053fe3a0768ed4c35 --- /dev/null +++ b/loras/ud/be/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b619c6ac1419c26b8ceeb9193b551f5c130e3e1e96fd7391739f82c38ce7c74b +size 342547 diff --git a/loras/ud/bg/adapter_config.json b/loras/ud/bg/adapter_config.json new file mode 100644 index 
0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/bg/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/bg/head_config.json b/loras/ud/bg/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/bg/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/bg/pytorch_adapter.bin b/loras/ud/bg/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..9336d2691465dc3aa8ae2535d7024516a10a142b --- /dev/null +++ b/loras/ud/bg/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:b37434cc1b0baf7d8134a12fa6dfc5a56532541e133ff240051d21dd1a7cdb9d +size 5333085 diff --git a/loras/ud/bg/pytorch_model_head.bin b/loras/ud/bg/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..b1cfa7d497b641ec1d804f794e69baabd4ac5abc --- /dev/null +++ b/loras/ud/bg/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:97beece698c58910a28fc12364f0ca09f0f28d705c6a1bd3033327ae85fc495b +size 342547 diff --git a/loras/ud/ca/adapter_config.json b/loras/ud/ca/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/ca/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/ca/head_config.json b/loras/ud/ca/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/ca/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 
98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/ca/pytorch_adapter.bin b/loras/ud/ca/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..ac019ddf4e9d23d6db65842a32bc93ca9c91b9c2 --- /dev/null +++ b/loras/ud/ca/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2387c70b765b317c139f1874913795120ec99f26c9707367904350e2b98f51e5 +size 5333085 diff --git a/loras/ud/ca/pytorch_model_head.bin b/loras/ud/ca/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..37f12c8816f672b5dfca3048be10d2e6218e689a --- /dev/null +++ b/loras/ud/ca/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:389fafed6164ed60a98bf4b7fcc01878fe07c9688e0aec09a5bdd9e9f66d0aa2 +size 342547 diff --git a/loras/ud/cs/adapter_config.json b/loras/ud/cs/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/cs/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/cs/head_config.json b/loras/ud/cs/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/cs/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + 
"LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/cs/pytorch_adapter.bin b/loras/ud/cs/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..7035a48c788317d2e6254f4f4f653a623c3a5abc --- /dev/null +++ b/loras/ud/cs/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c9cc782cce903d9d47771bc55788fed4f5bd50fcee414c987e70dd4aa3165e00 +size 5333085 diff --git a/loras/ud/cs/pytorch_model_head.bin b/loras/ud/cs/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..8a87fafa700d38289b51528bf43acd6fce3b48a2 --- /dev/null +++ b/loras/ud/cs/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9fb5f20a844edd2f57c0ab8158aa5947d19cbe41e5bcec8b65ea9718aecfcdf5 +size 342547 diff --git a/loras/ud/cy/adapter_config.json b/loras/ud/cy/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/cy/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/cy/head_config.json b/loras/ud/cy/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/cy/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + 
"LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/cy/pytorch_adapter.bin b/loras/ud/cy/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..561906230fde0ae8d1fd700243ffc8251ea12c63 --- /dev/null +++ b/loras/ud/cy/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d8d202414473c2ed165889a2f09bb383f64b605672a5113652728c2acff22302 +size 5333085 diff --git a/loras/ud/cy/pytorch_model_head.bin b/loras/ud/cy/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..56970e74f28f9a9c30124be7195218a2ee412dda --- /dev/null +++ b/loras/ud/cy/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8a6409b0422381a4781e7c0e2a7e7c9c295c20080ae15d2ccaa4ff377fea64c2 +size 342547 diff --git a/loras/ud/da/adapter_config.json b/loras/ud/da/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/da/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/da/head_config.json b/loras/ud/da/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/da/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + 
"LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/da/pytorch_adapter.bin b/loras/ud/da/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..0930de9970b6999c1d8dfba07a98c35c0e0cb089 --- /dev/null +++ b/loras/ud/da/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bce4a6f4475a39d16f66a66dbba808cd0a5867195d623dff990a828da0e6c71a +size 5333085 diff --git a/loras/ud/da/pytorch_model_head.bin b/loras/ud/da/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..ace4182b461413079cfdbcc5eafef6e754f5d106 --- /dev/null +++ b/loras/ud/da/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:404f3e323090cd60083312f3421d4048c6d000b02163300131a119a85f1547ad +size 342547 diff --git a/loras/ud/de/adapter_config.json b/loras/ud/de/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/de/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/de/head_config.json b/loras/ud/de/head_config.json new file mode 
100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/de/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/de/pytorch_adapter.bin b/loras/ud/de/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..a04a5868764613d52b8bcae6fc255ba50061ac93 --- /dev/null +++ b/loras/ud/de/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:82edf6ae73e327072fc4f1243bc6645c60d258e6fa16d0596b290259481a030d +size 5333085 diff --git a/loras/ud/de/pytorch_model_head.bin b/loras/ud/de/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..b4a60b486d727f9097fc5d3c5b32707327c6933e --- /dev/null +++ b/loras/ud/de/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:229569eeecf0b80f23a3eb6fbcd0d430c9d7c264f23e792e29c0ef83c9a534b1 +size 342547 diff --git a/loras/ud/el/adapter_config.json b/loras/ud/el/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/el/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + 
"attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/el/head_config.json b/loras/ud/el/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/el/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/el/pytorch_adapter.bin b/loras/ud/el/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..a7e9ca6b033604b6edeea871fe723e40ff8dbd6d --- /dev/null +++ b/loras/ud/el/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:54be73308b43a243264d230d1aef11855d6a4b5bd250a6431e31adaca4a5c939 +size 5333085 diff --git a/loras/ud/el/pytorch_model_head.bin b/loras/ud/el/pytorch_model_head.bin new file mode 100644 index 
0000000000000000000000000000000000000000..cb4ee5dcd2c7596d83bf0ab938ba9d0c1d45a40c --- /dev/null +++ b/loras/ud/el/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:04398bd5a07947b61c905892d595eaa1a647466759d227fc5cf75c8ec0a290d8 +size 342547 diff --git a/loras/ud/en/adapter_config.json b/loras/ud/en/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/en/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/en/head_config.json b/loras/ud/en/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/en/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + 
"version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/en/pytorch_adapter.bin b/loras/ud/en/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..e4cd083b6010f687ce16adda5c35d6a1135ccd2b --- /dev/null +++ b/loras/ud/en/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b275176b42986998501ceae397570ab97dac8d1cd194787dad756162a224af75 +size 5333085 diff --git a/loras/ud/en/pytorch_model_head.bin b/loras/ud/en/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..c632ea16275ecc91b2f9d3458f1142718be1e595 --- /dev/null +++ b/loras/ud/en/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d32e67af416ab9a761bd5f4af5860130650eed96f45b973122bcc94bdfd67c8d +size 342547 diff --git a/loras/ud/es/adapter_config.json b/loras/ud/es/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/es/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/es/head_config.json b/loras/ud/es/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/es/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 
8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/es/pytorch_adapter.bin b/loras/ud/es/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..20a0f3ba99068ff0c57e826904ae3f0f66f19015 --- /dev/null +++ b/loras/ud/es/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9dfab8f4d07164b14b0eb6ac9bea53e633e2bccb88296c259ffe2d40c3dc31f7 +size 5333085 diff --git a/loras/ud/es/pytorch_model_head.bin b/loras/ud/es/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..9600c2e22b246a5dda6ee8e634b4faaefb32cf3c --- /dev/null +++ b/loras/ud/es/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f2745e7624cd3711c384d4f2a507a4b6dfea7fe34d3b7e6de443481f2d7e44d7 +size 342547 diff --git a/loras/ud/et/adapter_config.json b/loras/ud/et/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/et/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/et/head_config.json b/loras/ud/et/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/et/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + 
"LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/et/pytorch_adapter.bin b/loras/ud/et/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..54eb3b50357054bad39deea691bbec1fe8abfe7e --- /dev/null +++ b/loras/ud/et/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d9a62078583150c93ed2184f57d6c72d89e44fad8e66f38438fc11044f8085eb +size 5333085 diff --git a/loras/ud/et/pytorch_model_head.bin b/loras/ud/et/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..962fa10df0d82da986423c4067d3bcb4e192a2c0 --- /dev/null +++ b/loras/ud/et/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ba8db1e16f9a2cb23052eb122d0b1e38908d9dfd9f2852cc76ff41f810ad8978 +size 342547 diff --git a/loras/ud/eu/adapter_config.json b/loras/ud/eu/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/eu/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/eu/head_config.json b/loras/ud/eu/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/eu/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + 
"LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/eu/pytorch_adapter.bin b/loras/ud/eu/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..d8b80c670a4be03c8bc56cda7f13d19dc481df69 --- /dev/null +++ b/loras/ud/eu/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:982d00ce709e7c31cda04744decba12d6a3d282d581b7fe3cde6ffef93ec4c18 +size 5333085 diff --git a/loras/ud/eu/pytorch_model_head.bin b/loras/ud/eu/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..64205595413fc3a045774afcd02fdc384b979468 --- /dev/null +++ b/loras/ud/eu/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:83c8d48afdceb26429bf6bc9bf08402085a73c58c998a1b88dc0139b5e565441 +size 342547 diff --git a/loras/ud/fa/adapter_config.json b/loras/ud/fa/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/fa/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/fa/head_config.json b/loras/ud/fa/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/fa/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + 
"hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/fa/pytorch_adapter.bin b/loras/ud/fa/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..4d164cf79df88c588ac87044f9022a48315abc84 --- /dev/null +++ b/loras/ud/fa/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:16e03512af818b5a1277c8c4821139455338f4d644f3690b6101203571fa3d7c +size 5333085 diff --git a/loras/ud/fa/pytorch_model_head.bin b/loras/ud/fa/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..7954b42575dbb1d71d87aa2a3e905f818ebc51c8 --- /dev/null +++ b/loras/ud/fa/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:79bf3fb93132db4abd289322c4c9d74dd31b00d605ba903fa34f6df78c96e700 +size 342547 diff --git a/loras/ud/fi/adapter_config.json b/loras/ud/fi/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/fi/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 
16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/fi/head_config.json b/loras/ud/fi/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/fi/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/fi/pytorch_adapter.bin b/loras/ud/fi/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..8a3616789b935d088d6bb31420a0822605d5727b --- /dev/null +++ b/loras/ud/fi/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b24b9a9acaa14128ea89c635958fa2a61b350000c216d2a8c9803f9c6061e07d +size 5333085 diff --git a/loras/ud/fi/pytorch_model_head.bin b/loras/ud/fi/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..c72366e33f383c0972a061187649a919c3e7bdbd --- /dev/null +++ b/loras/ud/fi/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:301d0a2e0ac71ba1c0b3fec4ed89f1bd23d241e817920dcf5a014cf58d06f443 +size 342547 diff --git a/loras/ud/fr/adapter_config.json b/loras/ud/fr/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/fr/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/fr/head_config.json b/loras/ud/fr/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/fr/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/fr/pytorch_adapter.bin b/loras/ud/fr/pytorch_adapter.bin new file mode 100644 index 
0000000000000000000000000000000000000000..3fd75a3d5b3b1c0506126acc7fc7a73fa8f445ab --- /dev/null +++ b/loras/ud/fr/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ba5fcbd4ea9fbaefbc1214976c6e79b7ea3b7ba94fa9a063f7885e8a5359d67c +size 5333085 diff --git a/loras/ud/fr/pytorch_model_head.bin b/loras/ud/fr/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..1e511bbb59b2eb749b92a8ac04862aa242cbb9e0 --- /dev/null +++ b/loras/ud/fr/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d0529645305303e9023ead80add889658ca9fc0bd1c27e32cd24a91c0f576dc9 +size 342547 diff --git a/loras/ud/ga/adapter_config.json b/loras/ud/ga/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/ga/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/ga/head_config.json b/loras/ud/ga/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/ga/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + 
"LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/ga/pytorch_adapter.bin b/loras/ud/ga/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..b8e18d5c078c75d4fb84c08b26f696cf614042ba --- /dev/null +++ b/loras/ud/ga/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e40214cfcd26429ea71c4f83c79840342733959a889b4d4a6a611d2c87e4a890 +size 5333085 diff --git a/loras/ud/ga/pytorch_model_head.bin b/loras/ud/ga/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..bd1f6c19b26bce6544cb1093814dc72180047bed --- /dev/null +++ b/loras/ud/ga/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c47bbd3be85cf8b0f5ea591fd95bde23cf707237be36594a4bc661c4fb564f13 +size 342547 diff --git a/loras/ud/gd/adapter_config.json b/loras/ud/gd/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/gd/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/gd/head_config.json b/loras/ud/gd/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/gd/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + 
"LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/gd/pytorch_adapter.bin b/loras/ud/gd/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..885c71cedf02e2415c43847838e0e10e2695910b --- /dev/null +++ b/loras/ud/gd/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5e4eb1414b0dece61821f140f6d80d89954d7305562e8c5fbe608d7bc6227a25 +size 5333085 diff --git a/loras/ud/gd/pytorch_model_head.bin b/loras/ud/gd/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..9651fc5cc6064eaa2c933125ef1f75872de841ef --- /dev/null +++ b/loras/ud/gd/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:da73ecca581b49477422dee0756db59920fdbd93d19fe3092efbc42bc44f0dea +size 342547 diff --git a/loras/ud/gl/adapter_config.json b/loras/ud/gl/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/gl/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/gl/head_config.json b/loras/ud/gl/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/gl/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + 
"LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/gl/pytorch_adapter.bin b/loras/ud/gl/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..d4dd3b14081a961c420848bcbb46d9856575622f --- /dev/null +++ b/loras/ud/gl/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:803841cad25d724d4c1087d3aa9fc88bc21edebcf0c6d6ba2a53da5481d4a3bf +size 5333085 diff --git a/loras/ud/gl/pytorch_model_head.bin b/loras/ud/gl/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..ef2725af59889e8c5757c1041cb946c7c03d79c7 --- /dev/null +++ b/loras/ud/gl/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:39973d1c6554790c491c26db3e0015c002d10f36c1e65f7b545cdec7c8bbe0fb +size 342547 diff --git a/loras/ud/he/adapter_config.json b/loras/ud/he/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/he/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/he/head_config.json b/loras/ud/he/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/he/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + 
"LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/he/pytorch_adapter.bin b/loras/ud/he/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..68924389c8e999783b75c8a37acba3820f610c0b --- /dev/null +++ b/loras/ud/he/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:011d4ecfeeef23d15a8b1e03f463c5fdaab12d45429d3ac8f253a4e68dc4a127 +size 5333085 diff --git a/loras/ud/he/pytorch_model_head.bin b/loras/ud/he/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..7ef7e3802a64ea399e8104898760979c235bdf3e --- /dev/null +++ b/loras/ud/he/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1e6f53ac746cd2b6f534982dc6ba44a897e3c1fc86cf91f723766120a1db244e +size 342547 diff --git a/loras/ud/hi/adapter_config.json b/loras/ud/hi/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/hi/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": 
"xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/hi/head_config.json b/loras/ud/hi/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/hi/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/hi/pytorch_adapter.bin b/loras/ud/hi/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..dcd1cd97dba3dc1be5a9796e388a32f9d4e0edd5 --- /dev/null +++ b/loras/ud/hi/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aa60f1ba5d78a0fd5da3f97235aac7369ff31b7e9201dd2876341003ac7f74ab +size 5333085 diff --git a/loras/ud/hi/pytorch_model_head.bin b/loras/ud/hi/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..c97c00e5ca4aa08e28fac77892f1c706ba083df6 --- /dev/null +++ b/loras/ud/hi/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0eb99ef26958fb871ffe482fa715d5e1fd050c7908c53e12a12891ef51dbd249 +size 342547 diff --git a/loras/ud/hu/adapter_config.json b/loras/ud/hu/adapter_config.json new file mode 100644 index 
0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/hu/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/hu/head_config.json b/loras/ud/hu/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/hu/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/hu/pytorch_adapter.bin b/loras/ud/hu/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..b2d04a978289c56ed4f899ab8aef4a9f51d43bc3 --- /dev/null +++ b/loras/ud/hu/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:0249e3485b51189f8afd191fe273c6249fbe23911adfec23cf612e552391aab3 +size 5333085 diff --git a/loras/ud/hu/pytorch_model_head.bin b/loras/ud/hu/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..e67f28001a2df5737281158a58a1b1c263fce956 --- /dev/null +++ b/loras/ud/hu/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f36994180b7d21f52bc80e85d72db5691130d95109ab06f044076c313c670aa4 +size 342547 diff --git a/loras/ud/hy/adapter_config.json b/loras/ud/hy/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/hy/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/hy/head_config.json b/loras/ud/hy/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/hy/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 
98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/hy/pytorch_adapter.bin b/loras/ud/hy/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..e9ca02c28999688b1f22d9d256ec42c567a8d557 --- /dev/null +++ b/loras/ud/hy/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:964cd6bf8a84bddff30116d60537238cf9d594026b4513b5113f2c163fb94ed2 +size 5333085 diff --git a/loras/ud/hy/pytorch_model_head.bin b/loras/ud/hy/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..099f9b0c4ec975bebad8757d6a99f56b9f6da8d4 --- /dev/null +++ b/loras/ud/hy/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:595de1474cd75d76031663a80c707ee80cbedd532c1fea64731903092543f40e +size 342547 diff --git a/loras/ud/is/adapter_config.json b/loras/ud/is/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/is/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/is/head_config.json b/loras/ud/is/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/is/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + 
"LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/is/pytorch_adapter.bin b/loras/ud/is/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..e2b0660304e00594ee4f0ca8785ae7e49b6cffaf --- /dev/null +++ b/loras/ud/is/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:783f1953fc40ad55bbdb4635dd2325c5e6ca9b3bd1265e141fa5b5a3808b2b2b +size 5333085 diff --git a/loras/ud/is/pytorch_model_head.bin b/loras/ud/is/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..f4edd404e09ec024a6e380b76e60541ef0f58d5b --- /dev/null +++ b/loras/ud/is/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ed7e6e3b06406a363a3b2cf9287ee3c6e0ae72122d99f44d6c293436e380ca3e +size 342547 diff --git a/loras/ud/it/adapter_config.json b/loras/ud/it/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/it/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/it/head_config.json b/loras/ud/it/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/it/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + 
"LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/it/pytorch_adapter.bin b/loras/ud/it/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..668716b1b4c884b8aa9c93518faa3969e05afe56 --- /dev/null +++ b/loras/ud/it/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1e437e6107493b199b063d54da4ae29bbb0af97076a6bbfa9b7969b3a9c15438 +size 5333085 diff --git a/loras/ud/it/pytorch_model_head.bin b/loras/ud/it/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..a1d2d5de57279e3950463f9b9e51c03fe8083422 --- /dev/null +++ b/loras/ud/it/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d2d83bd7f6fe266b9b593db19e9bdf70ca66847ccd2b12d584360b0a347d8819 +size 342547 diff --git a/loras/ud/ja/adapter_config.json b/loras/ud/ja/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/ja/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/ja/head_config.json b/loras/ud/ja/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/ja/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + 
"LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/ja/pytorch_adapter.bin b/loras/ud/ja/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..4e5ba99882b2b62b86e9f7ed1bf3ed9b664587a6 --- /dev/null +++ b/loras/ud/ja/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dee301df3ed9abc78ce8fba8d44808aae77a9c1e51196441bc4c7e1c1e876ac3 +size 5333085 diff --git a/loras/ud/ja/pytorch_model_head.bin b/loras/ud/ja/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..0e75b696a0e1d5bf481c618d50f05584d71bb542 --- /dev/null +++ b/loras/ud/ja/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b35e0fbff89e1abbc4c9b995dacec7a4caa8e6333544522d1a454b62fa3e4f28 +size 342547 diff --git a/loras/ud/kk/adapter_config.json b/loras/ud/kk/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/kk/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/kk/head_config.json b/loras/ud/kk/head_config.json new file mode 
100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/kk/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/kk/pytorch_adapter.bin b/loras/ud/kk/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..28f87c142d2853739485347dc4487d5002fc30a6 --- /dev/null +++ b/loras/ud/kk/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4a54fd80349e182336948f5531ec9dc7461449a18c6bde92140be3e9781eff00 +size 5333085 diff --git a/loras/ud/kk/pytorch_model_head.bin b/loras/ud/kk/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..86bdac36d1a1ce4028e6a106541a33be8ff138e1 --- /dev/null +++ b/loras/ud/kk/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:960e32146757b175af88a448b6bb2a4c2e725b8749282ba9af2ed6b5e6030137 +size 342547 diff --git a/loras/ud/ko/adapter_config.json b/loras/ud/ko/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/ko/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + 
"attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/ko/head_config.json b/loras/ud/ko/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/ko/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/ko/pytorch_adapter.bin b/loras/ud/ko/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..d6288a4e56b7624939b451693432e47c77180e8e --- /dev/null +++ b/loras/ud/ko/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:503dc67e3364e3bcd821b810fc9e231b22285687ef44306f3d57ccfefca3ec17 +size 5333085 diff --git a/loras/ud/ko/pytorch_model_head.bin b/loras/ud/ko/pytorch_model_head.bin new file mode 100644 index 
0000000000000000000000000000000000000000..fdcc0748b102471b6932fe2288f795a956104db0 --- /dev/null +++ b/loras/ud/ko/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b703b47a93d1f9e223bbd4cf82a2437e78a786ad333d88e840e4097c1ede8e22 +size 342547 diff --git a/loras/ud/la/adapter_config.json b/loras/ud/la/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/la/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/la/head_config.json b/loras/ud/la/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/la/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + 
"version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/la/pytorch_adapter.bin b/loras/ud/la/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..5d7396c534e47ed36ff1b67340dba1590eeb7686 --- /dev/null +++ b/loras/ud/la/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3de480b0242058e032c9203f4fe1c2e656c6d07ed38d85f7c71b729f3fa64bd1 +size 5333085 diff --git a/loras/ud/la/pytorch_model_head.bin b/loras/ud/la/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..e368405609c067da7355702686effe0ee087007b --- /dev/null +++ b/loras/ud/la/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a3b818f3beae959dfc3026b478a93983f99553eb6ceb9abcf95e36e212f43158 +size 342547 diff --git a/loras/ud/lt/adapter_config.json b/loras/ud/lt/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/lt/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/lt/head_config.json b/loras/ud/lt/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/lt/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 
8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/lt/pytorch_adapter.bin b/loras/ud/lt/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..ccce38a0b485e2f58f4d0dc23645cb10efbb5525 --- /dev/null +++ b/loras/ud/lt/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b2a56e3f52bad39c67d09b3ffd2a9f202e2cf4aa1f7ca24b6210860d16172a2d +size 5333085 diff --git a/loras/ud/lt/pytorch_model_head.bin b/loras/ud/lt/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..ef544f088de9a72da7c505f43d041e81572f23b3 --- /dev/null +++ b/loras/ud/lt/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f98aa0babefe0ddbbb73be5aed19ffd9c60c128bec24489bcbf7d91cab695ce2 +size 342547 diff --git a/loras/ud/lv/adapter_config.json b/loras/ud/lv/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/lv/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/lv/head_config.json b/loras/ud/lv/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/lv/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + 
"LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/lv/pytorch_adapter.bin b/loras/ud/lv/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..06cec3d1e3fa0ebd7f99091637e45bf8626fc596 --- /dev/null +++ b/loras/ud/lv/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:911ebe0dcc68e01f9a10cb22bec92362cb219ac1fc3de2ef49602ccdd1a3a15f +size 5333085 diff --git a/loras/ud/lv/pytorch_model_head.bin b/loras/ud/lv/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..045f47207434545a9c3fa22d211048e67e0a0aa2 --- /dev/null +++ b/loras/ud/lv/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d3716aad85a501d896a65b082c8bcfc0c750436835c1ba22b650bc72cb98df80 +size 342547 diff --git a/loras/ud/mr/adapter_config.json b/loras/ud/mr/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/mr/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/mr/head_config.json b/loras/ud/mr/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/mr/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + 
"LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/mr/pytorch_adapter.bin b/loras/ud/mr/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..6ba5843dd9c5da4caaa37e281988560031df0ccb --- /dev/null +++ b/loras/ud/mr/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d5c28d5408f64c86b02c7a4d314251675562e873048b7d45c90df06b3b094d7f +size 5333085 diff --git a/loras/ud/mr/pytorch_model_head.bin b/loras/ud/mr/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..0f17037eb77e6dd6f4f6b50df27acd7d32e13d8f --- /dev/null +++ b/loras/ud/mr/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:da2d6dada7c40c47d15af08e357940c43762794fe7fd10003470a9e35ec3cae9 +size 342547 diff --git a/loras/ud/mt/adapter_config.json b/loras/ud/mt/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/mt/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/mt/head_config.json b/loras/ud/mt/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/mt/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + 
"hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/mt/pytorch_adapter.bin b/loras/ud/mt/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..0f5c5b9a6d2a86e73d2aa62dba79b578d7c640fd --- /dev/null +++ b/loras/ud/mt/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9c68482e85e76b154ec9e3689f1c7e190b57b3b8673b746d7808ee59c0a46f6a +size 5333085 diff --git a/loras/ud/mt/pytorch_model_head.bin b/loras/ud/mt/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..f350d2fd3f493e3e0951e81397149c700810afce --- /dev/null +++ b/loras/ud/mt/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:020b1b74e4170a2e2bde63bf31c1d310502ab0425ccfa9bc2172c4c8c47ce780 +size 342547 diff --git a/loras/ud/nl/adapter_config.json b/loras/ud/nl/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/nl/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 
16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/nl/head_config.json b/loras/ud/nl/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/nl/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/nl/pytorch_adapter.bin b/loras/ud/nl/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..ce539275044f1bf025c2eb23424e7117bc809efb --- /dev/null +++ b/loras/ud/nl/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bd3d49728cb19f76081a36688ac8648f0b0bc1965294e17649520138425748b5 +size 5333085 diff --git a/loras/ud/nl/pytorch_model_head.bin b/loras/ud/nl/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..004490ed4d80d387864b6cba05202c8262877ffd --- /dev/null +++ b/loras/ud/nl/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:df3c6ad9a77d84d0f9e8c818594a2291b27f82f74064ffbdd91985a0498bfd56 +size 342547 diff --git a/loras/ud/no/adapter_config.json b/loras/ud/no/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/no/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/no/head_config.json b/loras/ud/no/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/no/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/no/pytorch_adapter.bin b/loras/ud/no/pytorch_adapter.bin new file mode 100644 index 
0000000000000000000000000000000000000000..ff794b3ed9c72297c2c15510a23eca8411c32d80 --- /dev/null +++ b/loras/ud/no/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dbc75c82f60c13805f30f6f79572acbebaf00646b2a0659a864235e1cfec4a3c +size 5333085 diff --git a/loras/ud/no/pytorch_model_head.bin b/loras/ud/no/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..2d49eece57230f0e18813acbf353df87b9653b70 --- /dev/null +++ b/loras/ud/no/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:bce5e773ede4d3b51df3387911c53e16eb7b399c8856ffd591a1ee53c497634e +size 342547 diff --git a/loras/ud/pl/adapter_config.json b/loras/ud/pl/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/pl/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/pl/head_config.json b/loras/ud/pl/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/pl/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + 
"LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/pl/pytorch_adapter.bin b/loras/ud/pl/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..13fbc00c8d89a0dec1db4c921ab60f661b2b06c1 --- /dev/null +++ b/loras/ud/pl/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1b2f16021b4f28919cabf23de6a87de599df5c958a520f78f658e12446aa6ed4 +size 5333085 diff --git a/loras/ud/pl/pytorch_model_head.bin b/loras/ud/pl/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..ab1c6ffb020186d9797bc002d5a5fa60d932d9e2 --- /dev/null +++ b/loras/ud/pl/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:15be929547c1a1134ddf5367f60c78319e1087aa1a122c9fda05ed64eb52a316 +size 342547 diff --git a/loras/ud/pt/adapter_config.json b/loras/ud/pt/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/pt/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/pt/head_config.json b/loras/ud/pt/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/pt/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + 
"LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/pt/pytorch_adapter.bin b/loras/ud/pt/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..7ddf5a117ca72c8ab77dd986d54f7ded92d4771a --- /dev/null +++ b/loras/ud/pt/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c1b843ad8fc1ead2e58c6c781cfcdd205f88466e20a7b0d113a2885b408048b7 +size 5333085 diff --git a/loras/ud/pt/pytorch_model_head.bin b/loras/ud/pt/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..e4f8f59228c6b497085e15b313dc623a6a37cb2f --- /dev/null +++ b/loras/ud/pt/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:beead45c5134edb62228b42567988c49b2847a071b8e307a3e846181497f72c8 +size 342547 diff --git a/loras/ud/ro/adapter_config.json b/loras/ud/ro/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/ro/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/ro/head_config.json b/loras/ud/ro/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/ro/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + 
"LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/ro/pytorch_adapter.bin b/loras/ud/ro/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..9160cb5d3e291b49e37f3d1af58207ad255c0b57 --- /dev/null +++ b/loras/ud/ro/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:034ea770da8f5ccfd396dfcc30187e48b5f96325f2ca4403c1c6102e336c33de +size 5333085 diff --git a/loras/ud/ro/pytorch_model_head.bin b/loras/ud/ro/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..8d40472f0a762d252f310eda6cbe79a803496b71 --- /dev/null +++ b/loras/ud/ro/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5ac7703972dae8e49e275ffe5de0252cd842db922cff28238ffa37172e18bfd0 +size 342547 diff --git a/loras/ud/ru/adapter_config.json b/loras/ud/ru/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/ru/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/ru/head_config.json b/loras/ud/ru/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/ru/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + 
"LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/ru/pytorch_adapter.bin b/loras/ud/ru/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..c9035965761e0eb3f398c264acf6ce425de5094d --- /dev/null +++ b/loras/ud/ru/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:98f08d7ab3eb03e1a026f1873eda00248e39d3a2e8d787723b6267d3a98d1693 +size 5333085 diff --git a/loras/ud/ru/pytorch_model_head.bin b/loras/ud/ru/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..a6fecc579c4a2376c8ec30d5cf919fb330731d50 --- /dev/null +++ b/loras/ud/ru/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:12bfeb05cf382b46d058e2d4ec9e64c5a83ec77a01c9a443a07f7875d956652e +size 342547 diff --git a/loras/ud/sk/adapter_config.json b/loras/ud/sk/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/sk/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": 
"xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/sk/head_config.json b/loras/ud/sk/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/sk/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/sk/pytorch_adapter.bin b/loras/ud/sk/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..419087e31edcc3cc1d954664b1055469bec3eee9 --- /dev/null +++ b/loras/ud/sk/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:0a8afa7f669969ddc8fcaf16bf65b65a2a80d378c6648fe1e56857cae8636a55 +size 5333085 diff --git a/loras/ud/sk/pytorch_model_head.bin b/loras/ud/sk/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..073145ca74ec262bfc4049d49c8427fdbe3c1857 --- /dev/null +++ b/loras/ud/sk/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:be081fcc797c081c241a3e08c5bdf329ddbdd13b5e2cbb08fe1da6a5340bd09a +size 342547 diff --git a/loras/ud/sl/adapter_config.json b/loras/ud/sl/adapter_config.json new file mode 100644 index 
0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/sl/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/sl/head_config.json b/loras/ud/sl/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/sl/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/sl/pytorch_adapter.bin b/loras/ud/sl/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..8ce28a6c45c1b8a8f6ffae4c40becaea47873f36 --- /dev/null +++ b/loras/ud/sl/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:ccd1b637a136d87363248495093154b0091d7bf0664173e61284993b8c9f6ca4 +size 5333085 diff --git a/loras/ud/sl/pytorch_model_head.bin b/loras/ud/sl/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..7309281a04b3074ec3ea9edebb09fc5eb4dccf2f --- /dev/null +++ b/loras/ud/sl/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fd54e2c6e86b99d9ec61b0891b01ee5d9520ebc67849d93d812c9865a4aad788 +size 342547 diff --git a/loras/ud/sr/adapter_config.json b/loras/ud/sr/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/sr/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/sr/head_config.json b/loras/ud/sr/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/sr/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 
98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/sr/pytorch_adapter.bin b/loras/ud/sr/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..7c3a6f05ec6e6f48bfdede91b47ec3fada34e265 --- /dev/null +++ b/loras/ud/sr/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:83c2fcf55bee3d5085606e05b1283bf32a7f1e7fc1c9e3885161fcf02c157aeb +size 5333085 diff --git a/loras/ud/sr/pytorch_model_head.bin b/loras/ud/sr/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..4515fa054026473e935affeae695abd8483f2e89 --- /dev/null +++ b/loras/ud/sr/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:180af870fe2b20055f94d9dc39dacf6cad098b12c5ed5ed8e841da43822c6f3d +size 342547 diff --git a/loras/ud/sv/adapter_config.json b/loras/ud/sv/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/sv/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/sv/head_config.json b/loras/ud/sv/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/sv/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + 
"LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/sv/pytorch_adapter.bin b/loras/ud/sv/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..c20ddb7260a42a9fba63f0da00fe2b4379d4ac9f --- /dev/null +++ b/loras/ud/sv/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5244f9ad6101455fd3691d29910af503795b9478ebdfb69e119ca16b4cc6b570 +size 5333085 diff --git a/loras/ud/sv/pytorch_model_head.bin b/loras/ud/sv/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..99e99f72c0aeb9a742c0f45f9e6a67e28a3ccfb9 --- /dev/null +++ b/loras/ud/sv/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5d5b993140c3c30ca97f03c0197d04a2b5739b56d603c6b56ef092fbd8dbee62 +size 342547 diff --git a/loras/ud/ta/adapter_config.json b/loras/ud/ta/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/ta/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/ta/head_config.json b/loras/ud/ta/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/ta/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + 
"LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/ta/pytorch_adapter.bin b/loras/ud/ta/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..a5d20617b40239083927e1e299bb6743043c7125 --- /dev/null +++ b/loras/ud/ta/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:07f1fb151ce4084496210dbc3bc5aa1a839c1b4aff4b429ffb9db685c78c7893 +size 5333085 diff --git a/loras/ud/ta/pytorch_model_head.bin b/loras/ud/ta/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..5aa543956289e325a0ccc1af4b2c993478f598bb --- /dev/null +++ b/loras/ud/ta/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:56471c0373674132090d498a98b4db12487a10685cf08e779714674ea418c92d +size 342547 diff --git a/loras/ud/tr/adapter_config.json b/loras/ud/tr/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/tr/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/tr/head_config.json b/loras/ud/tr/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/tr/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + 
"LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/tr/pytorch_adapter.bin b/loras/ud/tr/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..8a284554e02e126b4a1226f4d451617c0355cf5b --- /dev/null +++ b/loras/ud/tr/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:90d63882d21d8ff0efdc92524c5e1f84995f596b0f4eb2a342777a35ac4fa591 +size 5333085 diff --git a/loras/ud/tr/pytorch_model_head.bin b/loras/ud/tr/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..ceea89690651c4bed6fd1686dc7c31cb0996e478 --- /dev/null +++ b/loras/ud/tr/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b00630742e701ed5a218ef111e897b9745d3880a022278e2c3e55cc402e4ba72 +size 342547 diff --git a/loras/ud/uk/adapter_config.json b/loras/ud/uk/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/uk/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/uk/head_config.json b/loras/ud/uk/head_config.json new file mode 
100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/uk/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/uk/pytorch_adapter.bin b/loras/ud/uk/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..337a08664c30c26f4b61c694ac2c7ec3ecb1a2aa --- /dev/null +++ b/loras/ud/uk/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:24a9db6013717c11c38cc0e8a69de14be69e54fda7c5797d9906d2a9f72d9092 +size 5333085 diff --git a/loras/ud/uk/pytorch_model_head.bin b/loras/ud/uk/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..181b87f9319962ae97637839d01e4f1771c97103 --- /dev/null +++ b/loras/ud/uk/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6b865acef8a3afa5ca54742559cfecdca6a1b457136368d79b758c2cbbab8438 +size 342547 diff --git a/loras/ud/ur/adapter_config.json b/loras/ud/ur/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/ur/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + 
"attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/ur/head_config.json b/loras/ud/ur/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/ur/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/ur/pytorch_adapter.bin b/loras/ud/ur/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..561d00d4161c34db29afde45e9b168963d443a97 --- /dev/null +++ b/loras/ud/ur/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f130343dc4e1a41ca6616ac15b22b38ba3b6868e51c09061f0d39a055f29af0a +size 5333085 diff --git a/loras/ud/ur/pytorch_model_head.bin b/loras/ud/ur/pytorch_model_head.bin new file mode 100644 index 
0000000000000000000000000000000000000000..f586b0ed09a92c16bdf3e62f46177639cd88397b --- /dev/null +++ b/loras/ud/ur/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2f780acc0cc20d4d22a738e36d8e9622310cd2e99f9ae64ead791eedc4cfe455 +size 342547 diff --git a/loras/ud/vi/adapter_config.json b/loras/ud/vi/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/vi/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/vi/head_config.json b/loras/ud/vi/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/vi/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + 
"version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/vi/pytorch_adapter.bin b/loras/ud/vi/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..2527daf160e95795cfc48e98f62499c475b5f4ba --- /dev/null +++ b/loras/ud/vi/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a18d4eedd143e09fa8249a43c2d435a69bea8fa15c8e242b6118206dba0e505b +size 5333085 diff --git a/loras/ud/vi/pytorch_model_head.bin b/loras/ud/vi/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..24ee4d9a61afe857474c27b13008a67929af8c1f --- /dev/null +++ b/loras/ud/vi/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:dbc2b1e407f9ba2a2b5f200b4176e2dc15f3d38531fd084c3c377619cb49ad7d +size 342547 diff --git a/loras/ud/zh/adapter_config.json b/loras/ud/zh/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5251a9ccdb3047b613e604e378133a294bdf6a55 --- /dev/null +++ b/loras/ud/zh/adapter_config.json @@ -0,0 +1,25 @@ +{ + "config": { + "alpha": 32, + "architecture": "lora", + "attn_matrices": [ + "q", + "v" + ], + "composition_mode": "add", + "dropout": 0.0, + "init_weights": "lora", + "intermediate_lora": true, + "leave_out": [], + "output_lora": false, + "r": 16, + "selfattn_lora": true, + "use_gating": false + }, + "hidden_size": 768, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": "text", + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/zh/head_config.json b/loras/ud/zh/head_config.json new file mode 100644 index 0000000000000000000000000000000000000000..004ff24bea63e7a7e696922163e2531d63145fd9 --- /dev/null +++ b/loras/ud/zh/head_config.json @@ -0,0 +1,123 @@ +{ + "config": null, + "hidden_size": 768, + "label2id": { + "LABEL_0": 0, + "LABEL_1": 1, + "LABEL_10": 10, + "LABEL_100": 100, + "LABEL_101": 101, + "LABEL_102": 102, + "LABEL_103": 103, + "LABEL_104": 104, + "LABEL_105": 105, + "LABEL_106": 106, + "LABEL_107": 107, + "LABEL_108": 108, + "LABEL_109": 109, + "LABEL_11": 11, + "LABEL_110": 110, + "LABEL_12": 12, + "LABEL_13": 13, + "LABEL_14": 14, + "LABEL_15": 15, + "LABEL_16": 16, + "LABEL_17": 17, + "LABEL_18": 18, + "LABEL_19": 19, + "LABEL_2": 2, + "LABEL_20": 20, + "LABEL_21": 21, + "LABEL_22": 22, + "LABEL_23": 23, + "LABEL_24": 24, + "LABEL_25": 25, + "LABEL_26": 26, + "LABEL_27": 27, + "LABEL_28": 28, + "LABEL_29": 29, + "LABEL_3": 3, + "LABEL_30": 30, + "LABEL_31": 31, + "LABEL_32": 32, + "LABEL_33": 33, + "LABEL_34": 34, + "LABEL_35": 35, + "LABEL_36": 36, + "LABEL_37": 37, + "LABEL_38": 38, + "LABEL_39": 39, + "LABEL_4": 4, + "LABEL_40": 40, + "LABEL_41": 41, + "LABEL_42": 42, + "LABEL_43": 43, + "LABEL_44": 44, + "LABEL_45": 45, + "LABEL_46": 46, + "LABEL_47": 47, + "LABEL_48": 48, + "LABEL_49": 49, + "LABEL_5": 5, + "LABEL_50": 50, + "LABEL_51": 51, + "LABEL_52": 52, + "LABEL_53": 53, + "LABEL_54": 54, + "LABEL_55": 55, + "LABEL_56": 56, + "LABEL_57": 57, + "LABEL_58": 58, + "LABEL_59": 59, + "LABEL_6": 6, + "LABEL_60": 60, + "LABEL_61": 61, + "LABEL_62": 62, + "LABEL_63": 63, + "LABEL_64": 64, + "LABEL_65": 65, + "LABEL_66": 66, + "LABEL_67": 67, + "LABEL_68": 68, + "LABEL_69": 69, + "LABEL_7": 7, + "LABEL_70": 70, + "LABEL_71": 71, + "LABEL_72": 72, + "LABEL_73": 73, + "LABEL_74": 74, + "LABEL_75": 75, + "LABEL_76": 76, + "LABEL_77": 77, + "LABEL_78": 78, + "LABEL_79": 79, + "LABEL_8": 
8, + "LABEL_80": 80, + "LABEL_81": 81, + "LABEL_82": 82, + "LABEL_83": 83, + "LABEL_84": 84, + "LABEL_85": 85, + "LABEL_86": 86, + "LABEL_87": 87, + "LABEL_88": 88, + "LABEL_89": 89, + "LABEL_9": 9, + "LABEL_90": 90, + "LABEL_91": 91, + "LABEL_92": 92, + "LABEL_93": 93, + "LABEL_94": 94, + "LABEL_95": 95, + "LABEL_96": 96, + "LABEL_97": 97, + "LABEL_98": 98, + "LABEL_99": 99 + }, + "model_class": "SubwordXLMForTokenClassification", + "model_name": "xlmr-12l-v3_look48_lc0.1", + "model_type": "xlm-token", + "name": null, + "num_labels": 111, + "version": "0.1.1" +} \ No newline at end of file diff --git a/loras/ud/zh/pytorch_adapter.bin b/loras/ud/zh/pytorch_adapter.bin new file mode 100644 index 0000000000000000000000000000000000000000..2b4bba619c5eee01fe357148f9bed9ca624d026f --- /dev/null +++ b/loras/ud/zh/pytorch_adapter.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d30291343ed4296f589cdb73d1552fc3f4f975499c065343e437b03f9f26cd56 +size 5333085 diff --git a/loras/ud/zh/pytorch_model_head.bin b/loras/ud/zh/pytorch_model_head.bin new file mode 100644 index 0000000000000000000000000000000000000000..d2ff7ee62555f7b92df9f771b421756ea4b5afee --- /dev/null +++ b/loras/ud/zh/pytorch_model_head.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fd0d225299d28cb226060884539bc968832ed23bfb1ea7dfb26b821edaeed890 +size 342547
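Every adapter_config.json added in this patch carries the same LoRA hyperparameters (r=16, alpha=32, LoRA on the q/v attention matrices and the intermediate layer, no gating), and every head_config.json describes a 111-way token-classification head on a 768-dim SubwordXLMForTokenClassification backbone. As a minimal sketch of how one of these folders could be consumed, the snippet below uses the AdapterHub "adapters" library, whose LoRAConfig fields match the keys in these configs one-to-one. The base-model name "xlm-roberta-base" and the use of the generic AutoModelForTokenClassification class are illustrative assumptions only; the checkpoints in this diff were produced with the repository's own SubwordXLMForTokenClassification model, so treat this as a shape of the loading flow rather than the project's actual pipeline.

    from transformers import AutoModelForTokenClassification
    import adapters
    from adapters import LoRAConfig

    # The LoRA hyperparameters shared by every adapter_config.json in this diff.
    lora_cfg = LoRAConfig(
        r=16,
        alpha=32,
        dropout=0.0,
        attn_matrices=["q", "v"],
        selfattn_lora=True,
        intermediate_lora=True,
        output_lora=False,
        composition_mode="add",
        init_weights="lora",
        use_gating=False,
    )

    # Illustrative loading path (assumption: a generic HF backbone stands in for
    # the repo's SubwordXLMForTokenClassification). num_labels=111 mirrors head_config.json.
    model = AutoModelForTokenClassification.from_pretrained("xlm-roberta-base", num_labels=111)
    adapters.init(model)                      # enable adapter support on the HF model
    name = model.load_adapter("loras/ud/pl")  # reads adapter_config.json + pytorch_adapter.bin
    model.set_active_adapters(name)           # route the forward pass through the LoRA weights

Since every language directory shares the same config, only the weight files (pytorch_adapter.bin, pytorch_model_head.bin) differ between, say, loras/ud/pl and loras/ud/zh; swapping the path in load_adapter is enough to switch languages under this sketch.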