uaritm committed
Commit 8af2c0e
1 Parent(s): 39d050e

Upload folder using huggingface_hub

README.md CHANGED
@@ -85,7 +85,7 @@ The model was trained with the parameters:

  **DataLoader**:

- `torch.utils.data.dataloader.DataLoader` of length 17482 with parameters:
+ `torch.utils.data.dataloader.DataLoader` of length 27000 with parameters:
  ```
  {'batch_size': 64, 'sampler': 'torch.utils.data.sampler.RandomSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
  ```
@@ -97,7 +97,7 @@ The model was trained with the parameters:
  Parameters of the fit()-Method:
  ```
  {
- "epochs": 1,
+ "epochs": 3,
  "evaluation_steps": 1000,
  "evaluator": "sentence_transformers.evaluation.SequentialEvaluator.SequentialEvaluator",
  "max_grad_norm": 1,
eval/mse_evaluation_talks-en-ru-dev.tsv.gz_results.csv CHANGED
@@ -1,19 +1,85 @@
  epoch,steps,MSE
- 0,1000,32.71237909793854
- 0,2000,27.75866985321045
- 0,3000,24.353478848934174
- 0,4000,22.039371728897095
- 0,5000,20.264336466789246
- 0,6000,18.88641119003296
- 0,7000,18.071898818016052
- 0,8000,17.09209233522415
- 0,9000,16.36626571416855
- 0,10000,16.089291870594025
- 0,11000,15.540114045143127
- 0,12000,15.158072113990784
- 0,13000,14.720867574214935
- 0,14000,14.497239887714386
- 0,15000,14.27670270204544
- 0,16000,14.158044755458832
- 0,17000,14.065669476985931
- 0,-1,14.016427099704742
+ 0,1000,29.636847972869873
+ 0,2000,26.68347656726837
+ 0,3000,23.35055321455002
+ 0,4000,21.169736981391907
+ 0,5000,19.645842909812927
+ 0,6000,18.698057532310486
+ 0,7000,17.581355571746826
+ 0,8000,16.83618575334549
+ 0,9000,16.208815574645996
+ 0,10000,15.749427676200867
+ 0,11000,15.436582267284393
+ 0,12000,14.965556561946869
+ 0,13000,14.624053239822388
+ 0,14000,14.430433511734009
+ 0,15000,14.215564727783203
+ 0,16000,14.108707010746002
+ 0,17000,13.80162239074707
+ 0,18000,13.711421191692352
+ 0,19000,13.469411432743073
+ 0,20000,13.356244564056396
+ 0,21000,13.144378364086151
+ 0,22000,13.018444180488586
+ 0,23000,12.99600601196289
+ 0,24000,12.880223989486694
+ 0,25000,12.749376893043518
+ 0,26000,12.66828179359436
+ 0,27000,12.53049373626709
+ 0,-1,12.53049373626709
+ 1,1000,12.485707551240921
+ 1,2000,12.424865365028381
+ 1,3000,12.345978617668152
+ 1,4000,12.240785360336304
+ 1,5000,12.14439794421196
+ 1,6000,12.067271023988724
+ 1,7000,12.064743787050247
+ 1,8000,12.012451142072678
+ 1,9000,11.965955793857574
+ 1,10000,11.895912885665894
+ 1,11000,11.853444576263428
+ 1,12000,11.870686709880829
+ 1,13000,11.79024949669838
+ 1,14000,11.790616065263748
+ 1,15000,11.700322479009628
+ 1,16000,11.658591777086258
+ 1,17000,11.67202889919281
+ 1,18000,11.602000147104263
+ 1,19000,11.577870696783066
+ 1,20000,11.52963861823082
+ 1,21000,11.51268407702446
+ 1,22000,11.519527435302734
+ 1,23000,11.499155312776566
+ 1,24000,11.599432677030563
+ 1,25000,11.497681587934494
+ 1,26000,11.404198408126831
+ 1,27000,11.416954547166824
+ 1,-1,11.416954547166824
+ 2,1000,11.39390543103218
+ 2,2000,11.405295133590698
+ 2,3000,11.376839876174927
+ 2,4000,11.411702632904053
+ 2,5000,11.369504779577255
+ 2,6000,11.298183351755142
+ 2,7000,11.254807561635971
+ 2,8000,11.278623342514038
+ 2,9000,11.265123635530472
+ 2,10000,11.23458743095398
+ 2,11000,11.234147846698761
+ 2,12000,11.23715490102768
+ 2,13000,11.184832453727722
+ 2,14000,11.197124421596527
+ 2,15000,11.171828210353851
+ 2,16000,11.139634251594543
+ 2,17000,11.155179142951965
+ 2,18000,11.144037544727325
+ 2,19000,11.143685132265091
+ 2,20000,11.131618171930313
+ 2,21000,11.132986098527908
+ 2,22000,11.144643276929855
+ 2,23000,11.121739447116852
+ 2,24000,11.110422760248184
+ 2,25000,11.099160462617874
+ 2,26000,11.10241711139679
+ 2,27000,11.101384460926056
+ 2,-1,11.101384460926056
eval/mse_evaluation_talks-en-uk-dev.tsv.gz_results.csv CHANGED
@@ -1,19 +1,85 @@
  epoch,steps,MSE
- 0,1000,33.775851130485535
- 0,2000,27.516642212867737
- 0,3000,24.258939921855927
- 0,4000,22.25603461265564
- 0,5000,20.49310952425003
- 0,6000,19.11889910697937
- 0,7000,18.403351306915283
- 0,8000,17.41473823785782
- 0,9000,16.708974540233612
- 0,10000,16.458648443222046
- 0,11000,15.907931327819824
- 0,12000,15.564973652362823
- 0,13000,15.240997076034546
- 0,14000,14.996355772018433
- 0,15000,14.809346199035645
- 0,16000,14.677967131137848
- 0,17000,14.61833268404007
- 0,-1,14.57427442073822
+ 0,1000,29.611587524414062
+ 0,2000,26.475951075553894
+ 0,3000,23.38510751724243
+ 0,4000,21.393026411533356
+ 0,5000,19.86851841211319
+ 0,6000,18.884816765785217
+ 0,7000,17.858561873435974
+ 0,8000,17.178846895694733
+ 0,9000,16.563987731933594
+ 0,10000,16.209541261196136
+ 0,11000,15.902246534824371
+ 0,12000,15.49137532711029
+ 0,13000,15.144619345664978
+ 0,14000,15.004324913024902
+ 0,15000,14.861167967319489
+ 0,16000,14.722149074077606
+ 0,17000,14.399419724941254
+ 0,18000,14.330728352069855
+ 0,19000,14.136618375778198
+ 0,20000,14.074152708053589
+ 0,21000,13.86791467666626
+ 0,22000,13.770851492881775
+ 0,23000,13.734014332294464
+ 0,24000,13.648471236228943
+ 0,25000,13.546107709407806
+ 0,26000,13.48719596862793
+ 0,27000,13.34543377161026
+ 0,-1,13.34543377161026
+ 1,1000,13.384515047073364
+ 1,2000,13.326805830001831
+ 1,3000,13.252122700214386
+ 1,4000,13.12873512506485
+ 1,5000,13.058042526245117
+ 1,6000,13.032150268554688
+ 1,7000,13.024532794952393
+ 1,8000,12.983568012714386
+ 1,9000,12.933994829654694
+ 1,10000,12.842795252799988
+ 1,11000,12.78255581855774
+ 1,12000,12.845803797245026
+ 1,13000,12.764135003089905
+ 1,14000,12.772028148174286
+ 1,15000,12.685848772525787
+ 1,16000,12.630607187747955
+ 1,17000,12.640377879142761
+ 1,18000,12.575484812259674
+ 1,19000,12.577754259109497
+ 1,20000,12.516660988330841
+ 1,21000,12.513558566570282
+ 1,22000,12.502719461917877
+ 1,23000,12.53306120634079
+ 1,24000,12.545417249202728
+ 1,25000,12.51099705696106
+ 1,26000,12.40091323852539
+ 1,27000,12.422670423984528
+ 1,-1,12.422670423984528
+ 2,1000,12.39287182688713
+ 2,2000,12.446650862693787
+ 2,3000,12.404610216617584
+ 2,4000,12.437235563993454
+ 2,5000,12.375327199697495
+ 2,6000,12.337719649076462
+ 2,7000,12.28037104010582
+ 2,8000,12.31510192155838
+ 2,9000,12.29768916964531
+ 2,10000,12.265487760305405
+ 2,11000,12.259752303361893
+ 2,12000,12.233991175889969
+ 2,13000,12.216998636722565
+ 2,14000,12.233876436948776
+ 2,15000,12.200217694044113
+ 2,16000,12.181007862091064
+ 2,17000,12.201928347349167
+ 2,18000,12.197010964155197
+ 2,19000,12.213749438524246
+ 2,20000,12.194876372814178
+ 2,21000,12.205550819635391
+ 2,22000,12.222687900066376
+ 2,23000,12.178482115268707
+ 2,24000,12.171732634305954
+ 2,25000,12.157527357339859
+ 2,26000,12.16917484998703
+ 2,27000,12.16445341706276
+ 2,-1,12.16445341706276
eval/mse_evaluation_talks-en-vi-dev.tsv.gz_results.csv ADDED
@@ -0,0 +1,85 @@
+ epoch,steps,MSE
+ 0,1000,31.890451908111572
+ 0,2000,26.49132013320923
+ 0,3000,23.39797168970108
+ 0,4000,21.191902458667755
+ 0,5000,19.705557823181152
+ 0,6000,18.670324981212616
+ 0,7000,17.6055446267128
+ 0,8000,16.874907910823822
+ 0,9000,16.18591994047165
+ 0,10000,15.882593393325806
+ 0,11000,15.517708659172058
+ 0,12000,14.912942051887512
+ 0,13000,14.79342132806778
+ 0,14000,14.484474062919617
+ 0,15000,14.323283731937408
+ 0,16000,14.122170209884644
+ 0,17000,13.850988447666168
+ 0,18000,13.711534440517426
+ 0,19000,13.496537506580353
+ 0,20000,13.367235660552979
+ 0,21000,13.1413534283638
+ 0,22000,13.047271966934204
+ 0,23000,13.033472001552582
+ 0,24000,12.885531783103943
+ 0,25000,12.764434516429901
+ 0,26000,12.675976753234863
+ 0,27000,12.525415420532227
+ 0,-1,12.525415420532227
+ 1,1000,12.452717870473862
+ 1,2000,12.471866607666016
+ 1,3000,12.320271134376526
+ 1,4000,12.29478046298027
+ 1,5000,12.156340479850769
+ 1,6000,12.095817178487778
+ 1,7000,12.134081870317459
+ 1,8000,12.0941162109375
+ 1,9000,12.004471570253372
+ 1,10000,11.979522556066513
+ 1,11000,11.891839653253555
+ 1,12000,11.93360835313797
+ 1,13000,11.81677058339119
+ 1,14000,11.844835430383682
+ 1,15000,11.692845821380615
+ 1,16000,11.706770956516266
+ 1,17000,11.735942214727402
+ 1,18000,11.667679995298386
+ 1,19000,11.588936299085617
+ 1,20000,11.543810367584229
+ 1,21000,11.516707390546799
+ 1,22000,11.526595801115036
+ 1,23000,11.523006856441498
+ 1,24000,11.581437289714813
+ 1,25000,11.524339020252228
+ 1,26000,11.416389048099518
+ 1,27000,11.417466402053833
+ 1,-1,11.417466402053833
+ 2,1000,11.393678933382034
+ 2,2000,11.428393423557281
+ 2,3000,11.38225942850113
+ 2,4000,11.437571048736572
+ 2,5000,11.410973966121674
+ 2,6000,11.283791810274124
+ 2,7000,11.24892309308052
+ 2,8000,11.276593804359436
+ 2,9000,11.255781352519989
+ 2,10000,11.235520243644714
+ 2,11000,11.224151402711868
+ 2,12000,11.200260370969772
+ 2,13000,11.181414872407913
+ 2,14000,11.190024018287659
+ 2,15000,11.169525235891342
+ 2,16000,11.139501631259918
+ 2,17000,11.143650114536285
+ 2,18000,11.145080626010895
+ 2,19000,11.144429445266724
+ 2,20000,11.13773062825203
+ 2,21000,11.139272153377533
+ 2,22000,11.149793863296509
+ 2,23000,11.118503659963608
+ 2,24000,11.107239872217178
+ 2,25000,11.093289405107498
+ 2,26000,11.100520938634872
+ 2,27000,11.100372672080994
+ 2,-1,11.100372672080994
eval/translation_evaluation_talks-en-ru-dev.tsv.gz_results.csv CHANGED
@@ -1,19 +1,85 @@
  epoch,steps,src2trg,trg2src
- 0,1000,0.099,0.083
- 0,2000,0.092,0.085
- 0,3000,0.253,0.254
- 0,4000,0.679,0.685
- 0,5000,0.857,0.872
- 0,6000,0.894,0.903
- 0,7000,0.914,0.914
- 0,8000,0.935,0.924
- 0,9000,0.938,0.935
- 0,10000,0.943,0.928
- 0,11000,0.946,0.942
- 0,12000,0.946,0.944
- 0,13000,0.953,0.943
- 0,14000,0.957,0.949
- 0,15000,0.957,0.949
- 0,16000,0.959,0.949
- 0,17000,0.959,0.947
- 0,-1,0.961,0.948
+ 0,1000,0.121,0.107
+ 0,2000,0.15,0.158
+ 0,3000,0.562,0.565
+ 0,4000,0.797,0.789
+ 0,5000,0.872,0.877
+ 0,6000,0.897,0.896
+ 0,7000,0.924,0.918
+ 0,8000,0.933,0.931
+ 0,9000,0.94,0.938
+ 0,10000,0.943,0.94
+ 0,11000,0.948,0.947
+ 0,12000,0.955,0.944
+ 0,13000,0.955,0.946
+ 0,14000,0.955,0.939
+ 0,15000,0.952,0.944
+ 0,16000,0.957,0.953
+ 0,17000,0.958,0.951
+ 0,18000,0.96,0.954
+ 0,19000,0.966,0.957
+ 0,20000,0.965,0.954
+ 0,21000,0.964,0.954
+ 0,22000,0.966,0.955
+ 0,23000,0.959,0.95
+ 0,24000,0.962,0.955
+ 0,25000,0.964,0.953
+ 0,26000,0.964,0.954
+ 0,27000,0.964,0.953
+ 0,-1,0.964,0.953
+ 1,1000,0.962,0.954
+ 1,2000,0.966,0.957
+ 1,3000,0.96,0.955
+ 1,4000,0.962,0.955
+ 1,5000,0.962,0.955
+ 1,6000,0.966,0.958
+ 1,7000,0.964,0.957
+ 1,8000,0.964,0.957
+ 1,9000,0.963,0.957
+ 1,10000,0.961,0.958
+ 1,11000,0.963,0.956
+ 1,12000,0.962,0.955
+ 1,13000,0.962,0.955
+ 1,14000,0.96,0.954
+ 1,15000,0.965,0.959
+ 1,16000,0.965,0.956
+ 1,17000,0.962,0.957
+ 1,18000,0.963,0.957
+ 1,19000,0.965,0.959
+ 1,20000,0.964,0.957
+ 1,21000,0.966,0.958
+ 1,22000,0.961,0.956
+ 1,23000,0.965,0.958
+ 1,24000,0.961,0.96
+ 1,25000,0.962,0.959
+ 1,26000,0.964,0.96
+ 1,27000,0.963,0.959
+ 1,-1,0.963,0.959
+ 2,1000,0.967,0.96
+ 2,2000,0.963,0.96
+ 2,3000,0.962,0.958
+ 2,4000,0.964,0.96
+ 2,5000,0.963,0.962
+ 2,6000,0.964,0.961
+ 2,7000,0.964,0.959
+ 2,8000,0.962,0.959
+ 2,9000,0.964,0.959
+ 2,10000,0.965,0.958
+ 2,11000,0.965,0.957
+ 2,12000,0.963,0.961
+ 2,13000,0.964,0.96
+ 2,14000,0.964,0.961
+ 2,15000,0.964,0.959
+ 2,16000,0.966,0.959
+ 2,17000,0.963,0.961
+ 2,18000,0.964,0.961
+ 2,19000,0.965,0.96
+ 2,20000,0.964,0.96
+ 2,21000,0.965,0.96
+ 2,22000,0.965,0.961
+ 2,23000,0.965,0.961
+ 2,24000,0.964,0.961
+ 2,25000,0.965,0.961
+ 2,26000,0.965,0.961
+ 2,27000,0.965,0.961
+ 2,-1,0.965,0.961
eval/translation_evaluation_talks-en-uk-dev.tsv.gz_results.csv CHANGED
@@ -1,19 +1,85 @@
  epoch,steps,src2trg,trg2src
- 0,1000,0.06,0.054
- 0,2000,0.063,0.078
- 0,3000,0.202,0.201
- 0,4000,0.638,0.624
- 0,5000,0.78,0.784
- 0,6000,0.821,0.834
- 0,7000,0.851,0.845
- 0,8000,0.862,0.861
- 0,9000,0.874,0.875
- 0,10000,0.877,0.875
- 0,11000,0.886,0.877
+ 0,1000,0.077,0.083
+ 0,2000,0.141,0.139
+ 0,3000,0.464,0.466
+ 0,4000,0.689,0.698
+ 0,5000,0.78,0.799
+ 0,6000,0.82,0.843
+ 0,7000,0.859,0.857
+ 0,8000,0.868,0.872
+ 0,9000,0.873,0.881
+ 0,10000,0.883,0.876
+ 0,11000,0.888,0.884
  0,12000,0.888,0.885
- 0,13000,0.889,0.887
- 0,14000,0.891,0.892
- 0,15000,0.896,0.893
- 0,16000,0.897,0.893
- 0,17000,0.896,0.892
- 0,-1,0.896,0.894
+ 0,13000,0.892,0.88
+ 0,14000,0.884,0.888
+ 0,15000,0.889,0.885
+ 0,16000,0.895,0.89
+ 0,17000,0.895,0.893
+ 0,18000,0.902,0.89
+ 0,19000,0.896,0.891
+ 0,20000,0.899,0.897
+ 0,21000,0.897,0.9
+ 0,22000,0.904,0.895
+ 0,23000,0.902,0.9
+ 0,24000,0.899,0.896
+ 0,25000,0.901,0.902
+ 0,26000,0.906,0.9
+ 0,27000,0.907,0.899
+ 0,-1,0.907,0.899
+ 1,1000,0.905,0.899
+ 1,2000,0.908,0.902
+ 1,3000,0.906,0.897
+ 1,4000,0.907,0.904
+ 1,5000,0.902,0.898
+ 1,6000,0.905,0.897
+ 1,7000,0.907,0.899
+ 1,8000,0.902,0.901
+ 1,9000,0.903,0.902
+ 1,10000,0.907,0.897
+ 1,11000,0.91,0.901
+ 1,12000,0.906,0.904
+ 1,13000,0.906,0.898
+ 1,14000,0.908,0.902
+ 1,15000,0.909,0.904
+ 1,16000,0.909,0.899
+ 1,17000,0.912,0.904
+ 1,18000,0.915,0.903
+ 1,19000,0.912,0.908
+ 1,20000,0.914,0.902
+ 1,21000,0.911,0.901
+ 1,22000,0.909,0.9
+ 1,23000,0.913,0.906
+ 1,24000,0.913,0.904
+ 1,25000,0.911,0.903
+ 1,26000,0.915,0.903
+ 1,27000,0.912,0.904
+ 1,-1,0.912,0.904
+ 2,1000,0.918,0.905
+ 2,2000,0.914,0.897
+ 2,3000,0.912,0.898
+ 2,4000,0.914,0.899
+ 2,5000,0.916,0.902
+ 2,6000,0.912,0.903
+ 2,7000,0.911,0.902
+ 2,8000,0.915,0.9
+ 2,9000,0.918,0.898
+ 2,10000,0.914,0.896
+ 2,11000,0.915,0.902
+ 2,12000,0.918,0.901
+ 2,13000,0.915,0.898
+ 2,14000,0.914,0.9
+ 2,15000,0.917,0.903
+ 2,16000,0.914,0.9
+ 2,17000,0.914,0.9
+ 2,18000,0.916,0.901
+ 2,19000,0.916,0.897
+ 2,20000,0.916,0.899
+ 2,21000,0.916,0.899
+ 2,22000,0.914,0.898
+ 2,23000,0.916,0.898
+ 2,24000,0.916,0.898
+ 2,25000,0.913,0.9
+ 2,26000,0.916,0.901
+ 2,27000,0.916,0.9
+ 2,-1,0.916,0.9
eval/translation_evaluation_talks-en-vi-dev.tsv.gz_results.csv ADDED
@@ -0,0 +1,85 @@
+ epoch,steps,src2trg,trg2src
+ 0,1000,0.086,0.072
+ 0,2000,0.136,0.141
+ 0,3000,0.48,0.459
+ 0,4000,0.741,0.716
+ 0,5000,0.86,0.872
+ 0,6000,0.906,0.901
+ 0,7000,0.919,0.922
+ 0,8000,0.935,0.939
+ 0,9000,0.939,0.94
+ 0,10000,0.948,0.942
+ 0,11000,0.951,0.946
+ 0,12000,0.954,0.949
+ 0,13000,0.956,0.956
+ 0,14000,0.956,0.954
+ 0,15000,0.96,0.957
+ 0,16000,0.96,0.956
+ 0,17000,0.962,0.96
+ 0,18000,0.965,0.96
+ 0,19000,0.97,0.961
+ 0,20000,0.966,0.97
+ 0,21000,0.966,0.967
+ 0,22000,0.968,0.966
+ 0,23000,0.97,0.966
+ 0,24000,0.967,0.963
+ 0,25000,0.965,0.964
+ 0,26000,0.968,0.965
+ 0,27000,0.974,0.966
+ 0,-1,0.974,0.966
+ 1,1000,0.968,0.965
+ 1,2000,0.964,0.967
+ 1,3000,0.966,0.965
+ 1,4000,0.969,0.971
+ 1,5000,0.97,0.972
+ 1,6000,0.967,0.972
+ 1,7000,0.97,0.973
+ 1,8000,0.972,0.971
+ 1,9000,0.969,0.97
+ 1,10000,0.971,0.968
+ 1,11000,0.969,0.969
+ 1,12000,0.968,0.971
+ 1,13000,0.969,0.969
+ 1,14000,0.966,0.97
+ 1,15000,0.967,0.97
+ 1,16000,0.969,0.97
+ 1,17000,0.965,0.968
+ 1,18000,0.968,0.969
+ 1,19000,0.969,0.971
+ 1,20000,0.967,0.97
+ 1,21000,0.972,0.971
+ 1,22000,0.971,0.968
+ 1,23000,0.97,0.97
+ 1,24000,0.969,0.974
+ 1,25000,0.966,0.97
+ 1,26000,0.968,0.972
+ 1,27000,0.969,0.972
+ 1,-1,0.969,0.972
+ 2,1000,0.969,0.97
+ 2,2000,0.97,0.969
+ 2,3000,0.97,0.973
+ 2,4000,0.969,0.972
+ 2,5000,0.969,0.97
+ 2,6000,0.969,0.97
+ 2,7000,0.968,0.97
+ 2,8000,0.968,0.969
+ 2,9000,0.968,0.971
+ 2,10000,0.969,0.972
+ 2,11000,0.971,0.971
+ 2,12000,0.971,0.97
+ 2,13000,0.969,0.972
+ 2,14000,0.97,0.97
+ 2,15000,0.97,0.971
+ 2,16000,0.97,0.971
+ 2,17000,0.971,0.972
+ 2,18000,0.97,0.971
+ 2,19000,0.968,0.969
+ 2,20000,0.969,0.969
+ 2,21000,0.97,0.969
+ 2,22000,0.969,0.97
+ 2,23000,0.969,0.969
+ 2,24000,0.969,0.97
+ 2,25000,0.97,0.969
+ 2,26000,0.97,0.969
+ 2,27000,0.97,0.969
+ 2,-1,0.97,0.969
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:4793ea9eb21be154c9d02fd4a03ed89c356281e0c77e35a08a80e6ab13a5b0ef
+ oid sha256:66f73063024b39aee286513c19155872ba335c60bf6284e456fdccdbbf65ecda
  size 1112197096
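The `model.safetensors` entry above only swaps the Git LFS pointer: the `oid` is the SHA-256 of the actual weight file, and the size stays at 1112197096 bytes. A minimal check, assuming the full weights have been pulled (for example with `git lfs pull`) rather than just the three-line pointer:

```python
# Verify a downloaded model.safetensors against the LFS pointer in this commit.
import hashlib

EXPECTED_OID = "66f73063024b39aee286513c19155872ba335c60bf6284e456fdccdbbf65ecda"

sha256 = hashlib.sha256()
with open("model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        sha256.update(chunk)

assert sha256.hexdigest() == EXPECTED_OID, "checksum does not match the LFS pointer oid"
print("model.safetensors matches oid sha256:" + EXPECTED_OID)
```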