kingabzpro committed
Commit d2e7c73
1 Parent(s): 6fa3745
Training in progress, step 100
config.json CHANGED
@@ -1,6 +1,6 @@
 {
-  "_name_or_path": "
-  "activation_dropout": 0.
+  "_name_or_path": "m3hrdadfi/wav2vec2-large-xlsr-persian-v3",
+  "activation_dropout": 0.09216,
   "adapter_kernel_size": 3,
   "adapter_stride": 2,
   "add_adapter": false,
@@ -11,7 +11,7 @@
   "attention_dropout": 0.1,
   "bos_token_id": 1,
   "classifier_proj_size": 256,
-  "codevector_dim":
+  "codevector_dim": 256,
   "contrastive_logits_temperature": 0.1,
   "conv_bias": true,
   "conv_dim": [
@@ -42,7 +42,7 @@
     2
   ],
   "ctc_loss_reduction": "mean",
-  "ctc_zero_infinity":
+  "ctc_zero_infinity": true,
   "diversity_loss_weight": 0.1,
   "do_stable_layer_norm": true,
   "eos_token_id": 2,
@@ -52,7 +52,6 @@
   "feat_proj_dropout": 0.0,
   "feat_quantizer_dropout": 0.0,
   "final_dropout": 0.0,
-  "gradient_checkpointing": false,
   "hidden_act": "gelu",
   "hidden_dropout": 0.1,
   "hidden_size": 1024,
@@ -85,8 +84,8 @@
   "num_hidden_layers": 24,
   "num_negatives": 100,
   "output_hidden_size": 1024,
-  "pad_token_id":
-  "proj_codevector_dim":
+  "pad_token_id": 0,
+  "proj_codevector_dim": 256,
   "tdnn_dilation": [
     1,
     2,
@@ -111,6 +110,6 @@
   "torch_dtype": "float32",
   "transformers_version": "4.15.0",
   "use_weighted_layer_sum": false,
-  "vocab_size":
+  "vocab_size": 40,
   "xvector_output_dim": 512
 }
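The new config.json points the checkpoint at the m3hrdadfi/wav2vec2-large-xlsr-persian-v3 base model and records the CTC fine-tuning settings (activation_dropout 0.09216, ctc_zero_infinity true, pad_token_id 0, vocab_size 40), while dropping the deprecated in-config gradient_checkpointing flag. A minimal sketch, assuming the transformers library and not the author's actual training script, of how such values are typically passed as overrides when loading the base checkpoint:

# Sketch (assumed setup, not the exact script behind this commit): load the
# base checkpoint with the CTC-related overrides that appear in the new config.
from transformers import Wav2Vec2ForCTC

model = Wav2Vec2ForCTC.from_pretrained(
    "m3hrdadfi/wav2vec2-large-xlsr-persian-v3",  # "_name_or_path" in the new config
    activation_dropout=0.09216,
    ctc_loss_reduction="mean",
    ctc_zero_infinity=True,  # clamp infinite CTC losses from degenerate samples
    pad_token_id=0,          # must match the tokenizer's padding token
)
# The new config also records vocab_size 40, the size of the character
# vocabulary used by the CTC head for this fine-tuning run.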
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:29ec4a6731223e62a8b61258c6394062a425c73b247dcbb9156602b3f6affa16
+size 1262087665
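The weights are stored through Git LFS, so the diff only shows the pointer file: the spec version, the object's SHA-256 (oid), and its size in bytes (about 1.26 GB here). A small sketch, assuming a locally downloaded copy of the file, of how the pointer can be checked against the actual download:

# Sketch: verify a downloaded pytorch_model.bin against the LFS pointer above
# (oid is the file's SHA-256, size is its length in bytes).
import hashlib, os

path = "pytorch_model.bin"  # hypothetical local path of the downloaded file
expected_oid = "29ec4a6731223e62a8b61258c6394062a425c73b247dcbb9156602b3f6affa16"
expected_size = 1262087665

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

assert os.path.getsize(path) == expected_size
assert h.hexdigest() == expected_oid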
runs/Jan21_07-31-02_8de2030b21f4/1642750290.6172373/events.out.tfevents.1642750290.8de2030b21f4.74.1 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c7d44dcc1bf8c7aab426a8986b3e3e46f0a1057631bed159f94101b61b9a9a4e
+size 4763
runs/Jan21_07-31-02_8de2030b21f4/events.out.tfevents.1642750290.8de2030b21f4.74.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aeecc09310c3d8b28883a12844544729e6054d57d71f256f8a9e608be407d0dc
+size 5428
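The two added files under runs/ are LFS pointers for the TensorBoard event logs written during this training step. A brief sketch, assuming the tensorboard package and a local copy of the run directory, of how the logged scalars could be read back:

# Sketch (assumes the runs/ directory has been downloaded locally): read the
# scalars stored in the events.out.tfevents.* files with TensorBoard's reader.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("runs/Jan21_07-31-02_8de2030b21f4")
acc.Reload()
scalar_tags = acc.Tags()["scalars"]  # e.g. a "train/loss" tag, if logged
for event in acc.Scalars(scalar_tags[0]):
    print(event.step, event.value)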
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:92514c4b61741d16a23c8b7a0b2d6f0b095bc912d6998c7f168d2e5018b04c13
 size 2927
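training_args.bin is the pickled TrainingArguments object the Trainer saves alongside the checkpoint; only its hash changed here (the 2927-byte size is unchanged), so the serialized arguments differ from the previous commit. A quick way to inspect it, assuming a local copy and a PyTorch version that permits this unpickling:

# Sketch: inspect the saved TrainingArguments (a pickled Python object).
import torch

args = torch.load("training_args.bin", weights_only=False)  # weights_only=False needed on newer PyTorch
print(args.learning_rate, args.per_device_train_batch_size, args.num_train_epochs)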