metadata
model-index:
- name: karsar/gte-multilingual-base-hu
results:
- dataset:
config: hun_Latn-hun_Latn
name: MTEB BelebeleRetrieval (hun_Latn-hun_Latn)
revision: 75b399394a9803252cfec289d103de462763db7c
split: test
type: facebook/belebele
metrics:
- type: main_score
value: 84.52
- type: map_at_1
value: 75.333
- type: map_at_10
value: 81.589
- type: map_at_100
value: 81.857
- type: map_at_1000
value: 81.86099999999999
- type: map_at_20
value: 81.772
- type: map_at_3
value: 80.259
- type: map_at_5
value: 81.12
- type: mrr_at_1
value: 75.33333333333333
- type: mrr_at_10
value: 81.58880070546738
- type: mrr_at_100
value: 81.857421688991
- type: mrr_at_1000
value: 81.8609779154476
- type: mrr_at_20
value: 81.7720978955112
- type: mrr_at_3
value: 80.25925925925928
- type: mrr_at_5
value: 81.1203703703704
- type: nauc_map_at_1000_diff1
value: 83.1406785314172
- type: nauc_map_at_1000_max
value: 74.7920064826056
- type: nauc_map_at_1000_std
value: 9.75306995524627
- type: nauc_map_at_100_diff1
value: 83.13919284407014
- type: nauc_map_at_100_max
value: 74.79391249403568
- type: nauc_map_at_100_std
value: 9.755401974764572
- type: nauc_map_at_10_diff1
value: 83.08701108108275
- type: nauc_map_at_10_max
value: 74.88545349132366
- type: nauc_map_at_10_std
value: 9.997628643756531
- type: nauc_map_at_1_diff1
value: 84.86601118180064
- type: nauc_map_at_1_max
value: 72.13766122623628
- type: nauc_map_at_1_std
value: 5.8448184250495
- type: nauc_map_at_20_diff1
value: 83.08965421576404
- type: nauc_map_at_20_max
value: 74.814343891307
- type: nauc_map_at_20_std
value: 9.79543760958976
- type: nauc_map_at_3_diff1
value: 83.26947789593788
- type: nauc_map_at_3_max
value: 75.09180014138512
- type: nauc_map_at_3_std
value: 8.869243589731436
- type: nauc_map_at_5_diff1
value: 82.80692098415378
- type: nauc_map_at_5_max
value: 74.78410118423497
- type: nauc_map_at_5_std
value: 9.918556165939222
- type: nauc_mrr_at_1000_diff1
value: 83.1406785314172
- type: nauc_mrr_at_1000_max
value: 74.7920064826056
- type: nauc_mrr_at_1000_std
value: 9.75306995524627
- type: nauc_mrr_at_100_diff1
value: 83.13919284407014
- type: nauc_mrr_at_100_max
value: 74.79391249403568
- type: nauc_mrr_at_100_std
value: 9.755401974764572
- type: nauc_mrr_at_10_diff1
value: 83.08701108108275
- type: nauc_mrr_at_10_max
value: 74.88545349132366
- type: nauc_mrr_at_10_std
value: 9.997628643756531
- type: nauc_mrr_at_1_diff1
value: 84.86601118180064
- type: nauc_mrr_at_1_max
value: 72.13766122623628
- type: nauc_mrr_at_1_std
value: 5.8448184250495
- type: nauc_mrr_at_20_diff1
value: 83.08965421576404
- type: nauc_mrr_at_20_max
value: 74.814343891307
- type: nauc_mrr_at_20_std
value: 9.79543760958976
- type: nauc_mrr_at_3_diff1
value: 83.26947789593788
- type: nauc_mrr_at_3_max
value: 75.09180014138512
- type: nauc_mrr_at_3_std
value: 8.869243589731436
- type: nauc_mrr_at_5_diff1
value: 82.80692098415378
- type: nauc_mrr_at_5_max
value: 74.78410118423497
- type: nauc_mrr_at_5_std
value: 9.918556165939222
- type: nauc_ndcg_at_1000_diff1
value: 82.87191315538433
- type: nauc_ndcg_at_1000_max
value: 75.16982102922016
- type: nauc_ndcg_at_1000_std
value: 10.52683594414672
- type: nauc_ndcg_at_100_diff1
value: 82.85309969623432
- type: nauc_ndcg_at_100_max
value: 75.23344364153238
- type: nauc_ndcg_at_100_std
value: 10.607493012936814
- type: nauc_ndcg_at_10_diff1
value: 82.4777695687479
- type: nauc_ndcg_at_10_max
value: 75.7487283699043
- type: nauc_ndcg_at_10_std
value: 12.106303697974104
- type: nauc_ndcg_at_1_diff1
value: 84.86601118180064
- type: nauc_ndcg_at_1_max
value: 72.13766122623628
- type: nauc_ndcg_at_1_std
value: 5.8448184250495
- type: nauc_ndcg_at_20_diff1
value: 82.4859742044606
- type: nauc_ndcg_at_20_max
value: 75.47876530790867
- type: nauc_ndcg_at_20_std
value: 11.182962707766247
- type: nauc_ndcg_at_3_diff1
value: 82.7287522547649
- type: nauc_ndcg_at_3_max
value: 76.05573841110964
- type: nauc_ndcg_at_3_std
value: 9.697064965813148
- type: nauc_ndcg_at_5_diff1
value: 81.75303729599945
- type: nauc_ndcg_at_5_max
value: 75.49363885293647
- type: nauc_ndcg_at_5_std
value: 11.862780018840077
- type: nauc_precision_at_1000_diff1
value: .nan
- type: nauc_precision_at_1000_max
value: .nan
- type: nauc_precision_at_1000_std
value: .nan
- type: nauc_precision_at_100_diff1
value: 81.02240896358386
- type: nauc_precision_at_100_max
value: 84.94397759103616
- type: nauc_precision_at_100_std
value: 23.023653906007944
- type: nauc_precision_at_10_diff1
value: 78.26428817140886
- type: nauc_precision_at_10_max
value: 81.97044899831255
- type: nauc_precision_at_10_std
value: 28.695922813569396
- type: nauc_precision_at_1_diff1
value: 84.86601118180064
- type: nauc_precision_at_1_max
value: 72.13766122623628
- type: nauc_precision_at_1_std
value: 5.8448184250495
- type: nauc_precision_at_20_diff1
value: 75.53546671193718
- type: nauc_precision_at_20_max
value: 82.53543841779138
- type: nauc_precision_at_20_std
value: 26.165011459129044
- type: nauc_precision_at_3_diff1
value: 80.63337345511478
- type: nauc_precision_at_3_max
value: 79.77311525965087
- type: nauc_precision_at_3_std
value: 12.808288688017747
- type: nauc_precision_at_5_diff1
value: 76.24293162958055
- type: nauc_precision_at_5_max
value: 78.78492220858374
- type: nauc_precision_at_5_std
value: 21.992467398944566
- type: nauc_recall_at_1000_diff1
value: .nan
- type: nauc_recall_at_1000_max
value: .nan
- type: nauc_recall_at_1000_std
value: .nan
- type: nauc_recall_at_100_diff1
value: 81.02240896358384
- type: nauc_recall_at_100_max
value: 84.94397759103697
- type: nauc_recall_at_100_std
value: 23.02365390600609
- type: nauc_recall_at_10_diff1
value: 78.26428817140881
- type: nauc_recall_at_10_max
value: 81.97044899831292
- type: nauc_recall_at_10_std
value: 28.69592281356998
- type: nauc_recall_at_1_diff1
value: 84.86601118180064
- type: nauc_recall_at_1_max
value: 72.13766122623628
- type: nauc_recall_at_1_std
value: 5.8448184250495
- type: nauc_recall_at_20_diff1
value: 75.53546671193709
- type: nauc_recall_at_20_max
value: 82.53543841779138
- type: nauc_recall_at_20_std
value: 26.165011459128955
- type: nauc_recall_at_3_diff1
value: 80.63337345511479
- type: nauc_recall_at_3_max
value: 79.77311525965078
- type: nauc_recall_at_3_std
value: 12.808288688017635
- type: nauc_recall_at_5_diff1
value: 76.24293162958065
- type: nauc_recall_at_5_max
value: 78.78492220858384
- type: nauc_recall_at_5_std
value: 21.9924673989446
- type: ndcg_at_1
value: 75.333
- type: ndcg_at_10
value: 84.52
- type: ndcg_at_100
value: 85.765
- type: ndcg_at_1000
value: 85.85199999999999
- type: ndcg_at_20
value: 85.19200000000001
- type: ndcg_at_3
value: 81.82300000000001
- type: ndcg_at_5
value: 83.377
- type: precision_at_1
value: 75.333
- type: precision_at_10
value: 9.367
- type: precision_at_100
value: 0.993
- type: precision_at_1000
value: 0.1
- type: precision_at_20
value: 4.817
- type: precision_at_3
value: 28.778
- type: precision_at_5
value: 18.022
- type: recall_at_1
value: 75.333
- type: recall_at_10
value: 93.667
- type: recall_at_100
value: 99.333
- type: recall_at_1000
value: 100
- type: recall_at_20
value: 96.333
- type: recall_at_3
value: 86.333
- type: recall_at_5
value: 90.11099999999999
task:
type: Retrieval
- dataset:
config: hun_Latn-eng_Latn
name: MTEB BelebeleRetrieval (hun_Latn-eng_Latn)
revision: 75b399394a9803252cfec289d103de462763db7c
split: test
type: facebook/belebele
metrics:
- type: main_score
value: 83.101
- type: map_at_1
value: 73.556
- type: map_at_10
value: 80.011
- type: map_at_100
value: 80.284
- type: map_at_1000
value: 80.28999999999999
- type: map_at_20
value: 80.167
- type: map_at_3
value: 78.556
- type: map_at_5
value: 79.45
- type: mrr_at_1
value: 73.55555555555556
- type: mrr_at_10
value: 80.01075837742505
- type: mrr_at_100
value: 80.28385145874786
- type: mrr_at_1000
value: 80.29037744428379
- type: mrr_at_20
value: 80.16723881362682
- type: mrr_at_3
value: 78.55555555555557
- type: mrr_at_5
value: 79.45000000000005
- type: nauc_map_at_1000_diff1
value: 82.98075224524453
- type: nauc_map_at_1000_max
value: 70.44697913937723
- type: nauc_map_at_1000_std
value: 2.870876330916836
- type: nauc_map_at_100_diff1
value: 82.98154927855126
- type: nauc_map_at_100_max
value: 70.45314001730156
- type: nauc_map_at_100_std
value: 2.8835564687908257
- type: nauc_map_at_10_diff1
value: 82.84678746705569
- type: nauc_map_at_10_max
value: 70.60564910219685
- type: nauc_map_at_10_std
value: 3.2173817428023015
- type: nauc_map_at_1_diff1
value: 84.96087414405248
- type: nauc_map_at_1_max
value: 67.37556822473925
- type: nauc_map_at_1_std
value: -0.18018778633416485
- type: nauc_map_at_20_diff1
value: 82.99903430984571
- type: nauc_map_at_20_max
value: 70.5101842247085
- type: nauc_map_at_20_std
value: 2.8656500581185083
- type: nauc_map_at_3_diff1
value: 82.73317866665256
- type: nauc_map_at_3_max
value: 70.82189092891453
- type: nauc_map_at_3_std
value: 2.877404014763454
- type: nauc_map_at_5_diff1
value: 82.62883699148186
- type: nauc_map_at_5_max
value: 70.43732522032901
- type: nauc_map_at_5_std
value: 2.33684448635595
- type: nauc_mrr_at_1000_diff1
value: 82.98075224524453
- type: nauc_mrr_at_1000_max
value: 70.44697913937723
- type: nauc_mrr_at_1000_std
value: 2.870876330916836
- type: nauc_mrr_at_100_diff1
value: 82.98154927855126
- type: nauc_mrr_at_100_max
value: 70.45314001730156
- type: nauc_mrr_at_100_std
value: 2.8835564687908257
- type: nauc_mrr_at_10_diff1
value: 82.84678746705569
- type: nauc_mrr_at_10_max
value: 70.60564910219685
- type: nauc_mrr_at_10_std
value: 3.2173817428023015
- type: nauc_mrr_at_1_diff1
value: 84.96087414405248
- type: nauc_mrr_at_1_max
value: 67.37556822473925
- type: nauc_mrr_at_1_std
value: -0.18018778633416485
- type: nauc_mrr_at_20_diff1
value: 82.99903430984571
- type: nauc_mrr_at_20_max
value: 70.5101842247085
- type: nauc_mrr_at_20_std
value: 2.8656500581185083
- type: nauc_mrr_at_3_diff1
value: 82.73317866665256
- type: nauc_mrr_at_3_max
value: 70.82189092891453
- type: nauc_mrr_at_3_std
value: 2.877404014763454
- type: nauc_mrr_at_5_diff1
value: 82.62883699148186
- type: nauc_mrr_at_5_max
value: 70.43732522032901
- type: nauc_mrr_at_5_std
value: 2.33684448635595
- type: nauc_ndcg_at_1000_diff1
value: 82.79629619444499
- type: nauc_ndcg_at_1000_max
value: 71.0076319722781
- type: nauc_ndcg_at_1000_std
value: 3.7381182242930464
- type: nauc_ndcg_at_100_diff1
value: 82.83397899949976
- type: nauc_ndcg_at_100_max
value: 71.18500075519282
- type: nauc_ndcg_at_100_std
value: 4.132633921815649
- type: nauc_ndcg_at_10_diff1
value: 82.27523217182016
- type: nauc_ndcg_at_10_max
value: 72.0518800681147
- type: nauc_ndcg_at_10_std
value: 5.790750391790079
- type: nauc_ndcg_at_1_diff1
value: 84.96087414405248
- type: nauc_ndcg_at_1_max
value: 67.37556822473925
- type: nauc_ndcg_at_1_std
value: -0.18018778633416485
- type: nauc_ndcg_at_20_diff1
value: 82.97351669681734
- type: nauc_ndcg_at_20_max
value: 71.696223879848
- type: nauc_ndcg_at_20_std
value: 4.317621313606248
- type: nauc_ndcg_at_3_diff1
value: 81.94856814665425
- type: nauc_ndcg_at_3_max
value: 72.12618276320369
- type: nauc_ndcg_at_3_std
value: 4.24079481069279
- type: nauc_ndcg_at_5_diff1
value: 81.70729227032065
- type: nauc_ndcg_at_5_max
value: 71.49462564098667
- type: nauc_ndcg_at_5_std
value: 3.2972695951356026
- type: nauc_precision_at_1000_diff1
value: .nan
- type: nauc_precision_at_1000_max
value: .nan
- type: nauc_precision_at_1000_std
value: .nan
- type: nauc_precision_at_100_diff1
value: 87.0577860773934
- type: nauc_precision_at_100_max
value: 89.96265172735775
- type: nauc_precision_at_100_std
value: 46.50378669986745
- type: nauc_precision_at_10_diff1
value: 78.78833584715922
- type: nauc_precision_at_10_max
value: 82.80399339222852
- type: nauc_precision_at_10_std
value: 26.482798247504043
- type: nauc_precision_at_1_diff1
value: 84.96087414405248
- type: nauc_precision_at_1_max
value: 67.37556822473925
- type: nauc_precision_at_1_std
value: -0.18018778633416485
- type: nauc_precision_at_20_diff1
value: 85.8989050165519
- type: nauc_precision_at_20_max
value: 83.69302266361063
- type: nauc_precision_at_20_std
value: 18.366437484084422
- type: nauc_precision_at_3_diff1
value: 78.9832574887652
- type: nauc_precision_at_3_max
value: 77.09635804306849
- type: nauc_precision_at_3_std
value: 9.522746282198423
- type: nauc_precision_at_5_diff1
value: 77.29170153028011
- type: nauc_precision_at_5_max
value: 76.44009966344971
- type: nauc_precision_at_5_std
value: 7.807590041092263
- type: nauc_recall_at_1000_diff1
value: .nan
- type: nauc_recall_at_1000_max
value: .nan
- type: nauc_recall_at_1000_std
value: .nan
- type: nauc_recall_at_100_diff1
value: 87.05778607739357
- type: nauc_recall_at_100_max
value: 89.96265172735798
- type: nauc_recall_at_100_std
value: 46.50378669986473
- type: nauc_recall_at_10_diff1
value: 78.78833584715929
- type: nauc_recall_at_10_max
value: 82.80399339222863
- type: nauc_recall_at_10_std
value: 26.482798247504114
- type: nauc_recall_at_1_diff1
value: 84.96087414405248
- type: nauc_recall_at_1_max
value: 67.37556822473925
- type: nauc_recall_at_1_std
value: -0.18018778633416485
- type: nauc_recall_at_20_diff1
value: 85.89890501655198
- type: nauc_recall_at_20_max
value: 83.69302266361105
- type: nauc_recall_at_20_std
value: 18.366437484084596
- type: nauc_recall_at_3_diff1
value: 78.98325748876506
- type: nauc_recall_at_3_max
value: 77.09635804306843
- type: nauc_recall_at_3_std
value: 9.522746282198543
- type: nauc_recall_at_5_diff1
value: 77.29170153028014
- type: nauc_recall_at_5_max
value: 76.44009966345003
- type: nauc_recall_at_5_std
value: 7.807590041092628
- type: ndcg_at_1
value: 73.556
- type: ndcg_at_10
value: 83.101
- type: ndcg_at_100
value: 84.44
- type: ndcg_at_1000
value: 84.576
- type: ndcg_at_20
value: 83.685
- type: ndcg_at_3
value: 80.12899999999999
- type: ndcg_at_5
value: 81.76
- type: precision_at_1
value: 73.556
- type: precision_at_10
value: 9.278
- type: precision_at_100
value: 0.9900000000000001
- type: precision_at_1000
value: 0.1
- type: precision_at_20
value: 4.756
- type: precision_at_3
value: 28.222
- type: precision_at_5
value: 17.732999999999997
- type: recall_at_1
value: 73.556
- type: recall_at_10
value: 92.778
- type: recall_at_100
value: 99
- type: recall_at_1000
value: 100
- type: recall_at_20
value: 95.111
- type: recall_at_3
value: 84.667
- type: recall_at_5
value: 88.667
task:
type: Retrieval
- dataset:
config: eng_Latn-hun_Latn
name: MTEB BelebeleRetrieval (eng_Latn-hun_Latn)
revision: 75b399394a9803252cfec289d103de462763db7c
split: test
type: facebook/belebele
metrics:
- type: main_score
value: 79.566
- type: map_at_1
value: 68.77799999999999
- type: map_at_10
value: 76.066
- type: map_at_100
value: 76.458
- type: map_at_1000
value: 76.461
- type: map_at_20
value: 76.31700000000001
- type: map_at_3
value: 74.574
- type: map_at_5
value: 75.313
- type: mrr_at_1
value: 68.77777777777779
- type: mrr_at_10
value: 76.06582892416228
- type: mrr_at_100
value: 76.4576917776982
- type: mrr_at_1000
value: 76.4606772011712
- type: mrr_at_20
value: 76.31707897652173
- type: mrr_at_3
value: 74.57407407407408
- type: mrr_at_5
value: 75.31296296296298
- type: nauc_map_at_1000_diff1
value: 78.84953659186075
- type: nauc_map_at_1000_max
value: 72.95319354945482
- type: nauc_map_at_1000_std
value: 10.486309697258944
- type: nauc_map_at_100_diff1
value: 78.85001032587249
- type: nauc_map_at_100_max
value: 72.95426928056654
- type: nauc_map_at_100_std
value: 10.483813797190445
- type: nauc_map_at_10_diff1
value: 78.70526630685885
- type: nauc_map_at_10_max
value: 73.08594298926852
- type: nauc_map_at_10_std
value: 10.738816033415464
- type: nauc_map_at_1_diff1
value: 81.09615388366655
- type: nauc_map_at_1_max
value: 71.1979198161698
- type: nauc_map_at_1_std
value: 8.331854082828887
- type: nauc_map_at_20_diff1
value: 78.79122644457274
- type: nauc_map_at_20_max
value: 73.01380930608467
- type: nauc_map_at_20_std
value: 10.704116143456673
- type: nauc_map_at_3_diff1
value: 78.74281223888761
- type: nauc_map_at_3_max
value: 72.63559145253599
- type: nauc_map_at_3_std
value: 8.949203596261594
- type: nauc_map_at_5_diff1
value: 78.65419312305616
- type: nauc_map_at_5_max
value: 72.9058122628293
- type: nauc_map_at_5_std
value: 9.7937096286835
- type: nauc_mrr_at_1000_diff1
value: 78.84953659186075
- type: nauc_mrr_at_1000_max
value: 72.95319354945482
- type: nauc_mrr_at_1000_std
value: 10.486309697258944
- type: nauc_mrr_at_100_diff1
value: 78.85001032587249
- type: nauc_mrr_at_100_max
value: 72.95426928056654
- type: nauc_mrr_at_100_std
value: 10.483813797190445
- type: nauc_mrr_at_10_diff1
value: 78.70526630685885
- type: nauc_mrr_at_10_max
value: 73.08594298926852
- type: nauc_mrr_at_10_std
value: 10.738816033415464
- type: nauc_mrr_at_1_diff1
value: 81.09615388366655
- type: nauc_mrr_at_1_max
value: 71.1979198161698
- type: nauc_mrr_at_1_std
value: 8.331854082828887
- type: nauc_mrr_at_20_diff1
value: 78.79122644457274
- type: nauc_mrr_at_20_max
value: 73.01380930608467
- type: nauc_mrr_at_20_std
value: 10.704116143456673
- type: nauc_mrr_at_3_diff1
value: 78.74281223888761
- type: nauc_mrr_at_3_max
value: 72.63559145253599
- type: nauc_mrr_at_3_std
value: 8.949203596261594
- type: nauc_mrr_at_5_diff1
value: 78.65419312305616
- type: nauc_mrr_at_5_max
value: 72.9058122628293
- type: nauc_mrr_at_5_std
value: 9.7937096286835
- type: nauc_ndcg_at_1000_diff1
value: 78.52448995680614
- type: nauc_ndcg_at_1000_max
value: 73.37903474635324
- type: nauc_ndcg_at_1000_std
value: 11.392330178257408
- type: nauc_ndcg_at_100_diff1
value: 78.53878497635209
- type: nauc_ndcg_at_100_max
value: 73.42928694892305
- type: nauc_ndcg_at_100_std
value: 11.423236407965893
- type: nauc_ndcg_at_10_diff1
value: 77.78729543542663
- type: nauc_ndcg_at_10_max
value: 74.27536294287631
- type: nauc_ndcg_at_10_std
value: 13.367470230818531
- type: nauc_ndcg_at_1_diff1
value: 81.09615388366655
- type: nauc_ndcg_at_1_max
value: 71.1979198161698
- type: nauc_ndcg_at_1_std
value: 8.331854082828887
- type: nauc_ndcg_at_20_diff1
value: 78.11371876111158
- type: nauc_ndcg_at_20_max
value: 74.02257268275582
- type: nauc_ndcg_at_20_std
value: 13.38983541565397
- type: nauc_ndcg_at_3_diff1
value: 77.90927387771217
- type: nauc_ndcg_at_3_max
value: 73.13407026390954
- type: nauc_ndcg_at_3_std
value: 9.168566635087503
- type: nauc_ndcg_at_5_diff1
value: 77.67838156766508
- type: nauc_ndcg_at_5_max
value: 73.69372140026746
- type: nauc_ndcg_at_5_std
value: 10.869564941918753
- type: nauc_precision_at_1000_diff1
value: .nan
- type: nauc_precision_at_1000_max
value: .nan
- type: nauc_precision_at_1000_std
value: .nan
- type: nauc_precision_at_100_diff1
value: 81.13134142545822
- type: nauc_precision_at_100_max
value: 83.58232181761284
- type: nauc_precision_at_100_std
value: 21.934329287273574
- type: nauc_precision_at_10_diff1
value: 72.03877629483148
- type: nauc_precision_at_10_max
value: 82.56879222277134
- type: nauc_precision_at_10_std
value: 32.573735376503436
- type: nauc_precision_at_1_diff1
value: 81.09615388366655
- type: nauc_precision_at_1_max
value: 71.1979198161698
- type: nauc_precision_at_1_std
value: 8.331854082828887
- type: nauc_precision_at_20_diff1
value: 72.38870390923626
- type: nauc_precision_at_20_max
value: 84.61321635572419
- type: nauc_precision_at_20_std
value: 44.62061554181373
- type: nauc_precision_at_3_diff1
value: 74.8060913337046
- type: nauc_precision_at_3_max
value: 74.98517702862958
- type: nauc_precision_at_3_std
value: 9.985540040173424
- type: nauc_precision_at_5_diff1
value: 73.36844940809873
- type: nauc_precision_at_5_max
value: 77.25002548160217
- type: nauc_precision_at_5_std
value: 15.98642922666964
- type: nauc_recall_at_1000_diff1
value: .nan
- type: nauc_recall_at_1000_max
value: .nan
- type: nauc_recall_at_1000_std
value: .nan
- type: nauc_recall_at_100_diff1
value: 81.13134142545819
- type: nauc_recall_at_100_max
value: 83.58232181761542
- type: nauc_recall_at_100_std
value: 21.934329287270415
- type: nauc_recall_at_10_diff1
value: 72.03877629483173
- type: nauc_recall_at_10_max
value: 82.56879222277149
- type: nauc_recall_at_10_std
value: 32.57373537650351
- type: nauc_recall_at_1_diff1
value: 81.09615388366655
- type: nauc_recall_at_1_max
value: 71.1979198161698
- type: nauc_recall_at_1_std
value: 8.331854082828887
- type: nauc_recall_at_20_diff1
value: 72.3887039092368
- type: nauc_recall_at_20_max
value: 84.61321635572472
- type: nauc_recall_at_20_std
value: 44.62061554181426
- type: nauc_recall_at_3_diff1
value: 74.80609133370437
- type: nauc_recall_at_3_max
value: 74.98517702862947
- type: nauc_recall_at_3_std
value: 9.985540040173234
- type: nauc_recall_at_5_diff1
value: 73.36844940809881
- type: nauc_recall_at_5_max
value: 77.2500254816023
- type: nauc_recall_at_5_std
value: 15.986429226669795
- type: ndcg_at_1
value: 68.77799999999999
- type: ndcg_at_10
value: 79.566
- type: ndcg_at_100
value: 81.453
- type: ndcg_at_1000
value: 81.538
- type: ndcg_at_20
value: 80.47
- type: ndcg_at_3
value: 76.427
- type: ndcg_at_5
value: 77.78
- type: precision_at_1
value: 68.77799999999999
- type: precision_at_10
value: 9.056000000000001
- type: precision_at_100
value: 0.993
- type: precision_at_1000
value: 0.1
- type: precision_at_20
value: 4.7059999999999995
- type: precision_at_3
value: 27.259
- type: precision_at_5
value: 17.022000000000002
- type: recall_at_1
value: 68.77799999999999
- type: recall_at_10
value: 90.556
- type: recall_at_100
value: 99.333
- type: recall_at_1000
value: 100
- type: recall_at_20
value: 94.111
- type: recall_at_3
value: 81.77799999999999
- type: recall_at_5
value: 85.111
task:
type: Retrieval
- dataset:
config: eng_Latn-hun_Latn
name: MTEB BibleNLPBitextMining (eng_Latn-hun_Latn)
revision: 264a18480c529d9e922483839b4b9758e690b762
split: train
type: davidstap/biblenlp-corpus-mmteb
metrics:
- type: accuracy
value: 76.953125
- type: f1
value: 71.86197916666667
- type: main_score
value: 71.86197916666667
- type: precision
value: 69.81770833333334
- type: recall
value: 76.953125
task:
type: BitextMining
- dataset:
config: hun_Latn-eng_Latn
name: MTEB BibleNLPBitextMining (hun_Latn-eng_Latn)
revision: 264a18480c529d9e922483839b4b9758e690b762
split: train
type: davidstap/biblenlp-corpus-mmteb
metrics:
- type: accuracy
value: 85.15625
- type: f1
value: 81.11979166666667
- type: main_score
value: 81.11979166666667
- type: precision
value: 79.23177083333334
- type: recall
value: 85.15625
task:
type: BitextMining
- dataset:
config: default
name: MTEB HunSum2AbstractiveRetrieval (default)
revision: 24e1445c8180d937f0a16f8ae8a62e77cc952e56
split: test
type: SZTAKI-HLT/HunSum-2-abstractive
metrics:
- type: main_score
value: 80.28
- type: map_at_1
value: 80.28
- type: map_at_10
value: 84.904
- type: map_at_100
value: 85.146
- type: map_at_1000
value: 85.151
- type: map_at_20
value: 85.06
- type: map_at_3
value: 83.94200000000001
- type: map_at_5
value: 84.568
- type: mrr_at_1
value: 80.28028028028028
- type: mrr_at_10
value: 84.90418990418983
- type: mrr_at_100
value: 85.14630746415321
- type: mrr_at_1000
value: 85.15092295233896
- type: mrr_at_20
value: 85.0602092982553
- type: mrr_at_3
value: 83.94227560894221
- type: mrr_at_5
value: 84.56790123456784
- type: nauc_map_at_1000_diff1
value: 88.79097723803582
- type: nauc_map_at_1000_max
value: 73.98309940197979
- type: nauc_map_at_1000_std
value: 8.542178772436394
- type: nauc_map_at_100_diff1
value: 88.78960257384884
- type: nauc_map_at_100_max
value: 73.98791845872987
- type: nauc_map_at_100_std
value: 8.548067869435512
- type: nauc_map_at_10_diff1
value: 88.69014462571421
- type: nauc_map_at_10_max
value: 74.14057877906977
- type: nauc_map_at_10_std
value: 8.859912246907557
- type: nauc_map_at_1_diff1
value: 89.97737048914607
- type: nauc_map_at_1_max
value: 71.45737401142345
- type: nauc_map_at_1_std
value: 4.759040523357649
- type: nauc_map_at_20_diff1
value: 88.77444892974766
- type: nauc_map_at_20_max
value: 74.02582129341143
- type: nauc_map_at_20_std
value: 8.540371486769867
- type: nauc_map_at_3_diff1
value: 88.63122911038182
- type: nauc_map_at_3_max
value: 74.00939999585503
- type: nauc_map_at_3_std
value: 8.800123446309959
- type: nauc_map_at_5_diff1
value: 88.60660773314059
- type: nauc_map_at_5_max
value: 74.22116501727933
- type: nauc_map_at_5_std
value: 9.32259867764275
- type: nauc_mrr_at_1000_diff1
value: 88.79097723803582
- type: nauc_mrr_at_1000_max
value: 73.98309940197979
- type: nauc_mrr_at_1000_std
value: 8.542178772436394
- type: nauc_mrr_at_100_diff1
value: 88.78960257384884
- type: nauc_mrr_at_100_max
value: 73.98791845872987
- type: nauc_mrr_at_100_std
value: 8.548067869435512
- type: nauc_mrr_at_10_diff1
value: 88.69014462571421
- type: nauc_mrr_at_10_max
value: 74.14057877906977
- type: nauc_mrr_at_10_std
value: 8.859912246907557
- type: nauc_mrr_at_1_diff1
value: 89.97737048914607
- type: nauc_mrr_at_1_max
value: 71.45737401142345
- type: nauc_mrr_at_1_std
value: 4.759040523357649
- type: nauc_mrr_at_20_diff1
value: 88.77444892974766
- type: nauc_mrr_at_20_max
value: 74.02582129341143
- type: nauc_mrr_at_20_std
value: 8.540371486769867
- type: nauc_mrr_at_3_diff1
value: 88.63122911038182
- type: nauc_mrr_at_3_max
value: 74.00939999585503
- type: nauc_mrr_at_3_std
value: 8.800123446309959
- type: nauc_mrr_at_5_diff1
value: 88.60660773314059
- type: nauc_mrr_at_5_max
value: 74.22116501727933
- type: nauc_mrr_at_5_std
value: 9.32259867764275
- type: nauc_ndcg_at_1000_diff1
value: 88.63409129211333
- type: nauc_ndcg_at_1000_max
value: 74.5309057755214
- type: nauc_ndcg_at_1000_std
value: 9.413371605896351
- type: nauc_ndcg_at_100_diff1
value: 88.59861614511203
- type: nauc_ndcg_at_100_max
value: 74.70664505666113
- type: nauc_ndcg_at_100_std
value: 9.763156219113043
- type: nauc_ndcg_at_10_diff1
value: 88.1528125374064
- type: nauc_ndcg_at_10_max
value: 75.41722645550642
- type: nauc_ndcg_at_10_std
value: 10.681431101624623
- type: nauc_ndcg_at_1_diff1
value: 89.97737048914607
- type: nauc_ndcg_at_1_max
value: 71.45737401142345
- type: nauc_ndcg_at_1_std
value: 4.759040523357649
- type: nauc_ndcg_at_20_diff1
value: 88.50160845988172
- type: nauc_ndcg_at_20_max
value: 74.96664595209074
- type: nauc_ndcg_at_20_std
value: 9.380512837396632
- type: nauc_ndcg_at_3_diff1
value: 88.05768683477201
- type: nauc_ndcg_at_3_max
value: 75.02313831321838
- type: nauc_ndcg_at_3_std
value: 10.537764520686949
- type: nauc_ndcg_at_5_diff1
value: 87.97191682703232
- type: nauc_ndcg_at_5_max
value: 75.52848194372503
- type: nauc_ndcg_at_5_std
value: 11.774494710346415
- type: nauc_precision_at_1000_diff1
value: 100
- type: nauc_precision_at_1000_max
value: 95.64221684222578
- type: nauc_precision_at_1000_std
value: 90.73902903394449
- type: nauc_precision_at_100_diff1
value: 87.42504295503302
- type: nauc_precision_at_100_max
value: 92.05171886330325
- type: nauc_precision_at_100_std
value: 52.07235965498117
- type: nauc_precision_at_10_diff1
value: 84.8336822266199
- type: nauc_precision_at_10_max
value: 83.65267406867247
- type: nauc_precision_at_10_std
value: 21.99447008561385
- type: nauc_precision_at_1_diff1
value: 89.97737048914607
- type: nauc_precision_at_1_max
value: 71.45737401142345
- type: nauc_precision_at_1_std
value: 4.759040523357649
- type: nauc_precision_at_20_diff1
value: 86.96846782406944
- type: nauc_precision_at_20_max
value: 82.96275580308742
- type: nauc_precision_at_20_std
value: 13.610628290924112
- type: nauc_precision_at_3_diff1
value: 85.85061149321128
- type: nauc_precision_at_3_max
value: 78.89652765257202
- type: nauc_precision_at_3_std
value: 17.23042587138826
- type: nauc_precision_at_5_diff1
value: 84.94624842605198
- type: nauc_precision_at_5_max
value: 81.81787797176683
- type: nauc_precision_at_5_std
value: 23.760497470734563
- type: nauc_recall_at_1000_diff1
value: 100
- type: nauc_recall_at_1000_max
value: 95.64221684223966
- type: nauc_recall_at_1000_std
value: 90.73902903396089
- type: nauc_recall_at_100_diff1
value: 87.42504295503399
- type: nauc_recall_at_100_max
value: 92.0517188633039
- type: nauc_recall_at_100_std
value: 52.07235965498028
- type: nauc_recall_at_10_diff1
value: 84.83368222661971
- type: nauc_recall_at_10_max
value: 83.65267406867238
- type: nauc_recall_at_10_std
value: 21.99447008561354
- type: nauc_recall_at_1_diff1
value: 89.97737048914607
- type: nauc_recall_at_1_max
value: 71.45737401142345
- type: nauc_recall_at_1_std
value: 4.759040523357649
- type: nauc_recall_at_20_diff1
value: 86.96846782406952
- type: nauc_recall_at_20_max
value: 82.96275580308719
- type: nauc_recall_at_20_std
value: 13.610628290923948
- type: nauc_recall_at_3_diff1
value: 85.85061149321133
- type: nauc_recall_at_3_max
value: 78.89652765257206
- type: nauc_recall_at_3_std
value: 17.23042587138828
- type: nauc_recall_at_5_diff1
value: 84.94624842605192
- type: nauc_recall_at_5_max
value: 81.81787797176689
- type: nauc_recall_at_5_std
value: 23.760497470734606
- type: ndcg_at_1
value: 80.28
- type: ndcg_at_10
value: 86.997
- type: ndcg_at_100
value: 88.161
- type: ndcg_at_1000
value: 88.28200000000001
- type: ndcg_at_20
value: 87.566
- type: ndcg_at_3
value: 85.064
- type: ndcg_at_5
value: 86.195
- type: precision_at_1
value: 80.28
- type: precision_at_10
value: 9.349
- type: precision_at_100
value: 0.989
- type: precision_at_1000
value: 0.1
- type: precision_at_20
value: 4.787
- type: precision_at_3
value: 29.429
- type: precision_at_5
value: 18.208
- type: recall_at_1
value: 80.28
- type: recall_at_10
value: 93.49300000000001
- type: recall_at_100
value: 98.899
- type: recall_at_1000
value: 99.85000000000001
- type: recall_at_20
value: 95.746
- type: recall_at_3
value: 88.288
- type: recall_at_5
value: 91.04100000000001
task:
type: Retrieval
- dataset:
config: hu
name: MTEB MassiveIntentClassification (hu)
revision: 4672e20407010da34463acc759c162ca9734bca6
split: test
type: mteb/amazon_massive_intent
metrics:
- type: accuracy
value: 55.823806321452594
- type: f1
value: 50.78756643922222
- type: f1_weighted
value: 55.11520680706619
- type: main_score
value: 55.823806321452594
task:
type: Classification
- dataset:
config: hu
name: MTEB MassiveIntentClassification (hu)
revision: 4672e20407010da34463acc759c162ca9734bca6
split: validation
type: mteb/amazon_massive_intent
metrics:
- type: accuracy
value: 54.66797835710773
- type: f1
value: 49.5096347438473
- type: f1_weighted
value: 53.73310190085533
- type: main_score
value: 54.66797835710773
task:
type: Classification
- dataset:
config: hu
name: MTEB MassiveScenarioClassification (hu)
revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8
split: test
type: mteb/amazon_massive_scenario
metrics:
- type: accuracy
value: 63.37256220578345
- type: f1
value: 61.58399629628825
- type: f1_weighted
value: 63.13464436259451
- type: main_score
value: 63.37256220578345
task:
type: Classification
- dataset:
config: hu
name: MTEB MassiveScenarioClassification (hu)
revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8
split: validation
type: mteb/amazon_massive_scenario
metrics:
- type: accuracy
value: 62.03148057058534
- type: f1
value: 60.9893800714451
- type: f1_weighted
value: 61.85509382597554
- type: main_score
value: 62.03148057058534
task:
type: Classification
- dataset:
config: hu
name: MTEB MultiEURLEXMultilabelClassification (hu)
revision: 2aea5a6dc8fdcfeca41d0fb963c0a338930bde5c
split: test
type: mteb/eurlex-multilingual
metrics:
- type: accuracy
value: 3.0380000000000003
- type: f1
value: 27.32839484028383
- type: lrap
value: 41.09644076719448
- type: main_score
value: 3.0380000000000003
task:
type: MultilabelClassification
- dataset:
config: arb_Arab-hun_Latn
name: MTEB NTREXBitextMining (arb_Arab-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 83.07461191787682
- type: f1
value: 78.97012184944082
- type: main_score
value: 78.97012184944082
- type: precision
value: 77.16324486730095
- type: recall
value: 83.07461191787682
task:
type: BitextMining
- dataset:
config: ben_Beng-hun_Latn
name: MTEB NTREXBitextMining (ben_Beng-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 81.2719078617927
- type: f1
value: 76.6133724396118
- type: main_score
value: 76.6133724396118
- type: precision
value: 74.5247633354794
- type: recall
value: 81.2719078617927
task:
type: BitextMining
- dataset:
config: deu_Latn-hun_Latn
name: MTEB NTREXBitextMining (deu_Latn-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 90.78617926890335
- type: f1
value: 88.27073944249707
- type: main_score
value: 88.27073944249707
- type: precision
value: 87.1056584877316
- type: recall
value: 90.78617926890335
task:
type: BitextMining
- dataset:
config: ell_Grek-hun_Latn
name: MTEB NTREXBitextMining (ell_Grek-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 89.08362543815723
- type: f1
value: 86.19429143715574
- type: main_score
value: 86.19429143715574
- type: precision
value: 84.85728592889333
- type: recall
value: 89.08362543815723
task:
type: BitextMining
- dataset:
config: eng_Latn-hun_Latn
name: MTEB NTREXBitextMining (eng_Latn-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 93.23985978968453
- type: f1
value: 91.4087798364213
- type: main_score
value: 91.4087798364213
- type: precision
value: 90.57753296611585
- type: recall
value: 93.23985978968453
task:
type: BitextMining
- dataset:
config: fas_Arab-hun_Latn
name: MTEB NTREXBitextMining (fas_Arab-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 86.37956935403105
- type: f1
value: 82.8442663995994
- type: main_score
value: 82.8442663995994
- type: precision
value: 81.2635620096812
- type: recall
value: 86.37956935403105
task:
type: BitextMining
- dataset:
config: fin_Latn-hun_Latn
name: MTEB NTREXBitextMining (fin_Latn-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 85.42814221331997
- type: f1
value: 81.80031952690942
- type: main_score
value: 81.80031952690942
- type: precision
value: 80.1235186112502
- type: recall
value: 85.42814221331997
task:
type: BitextMining
- dataset:
config: fra_Latn-hun_Latn
name: MTEB NTREXBitextMining (fra_Latn-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 90.83625438157236
- type: f1
value: 88.31079953263227
- type: main_score
value: 88.31079953263227
- type: precision
value: 87.11817726589885
- type: recall
value: 90.83625438157236
task:
type: BitextMining
- dataset:
config: heb_Hebr-hun_Latn
name: MTEB NTREXBitextMining (heb_Hebr-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 81.32198297446169
- type: f1
value: 76.4972458688032
- type: main_score
value: 76.4972458688032
- type: precision
value: 74.3578462932494
- type: recall
value: 81.32198297446169
task:
type: BitextMining
- dataset:
config: hin_Deva-hun_Latn
name: MTEB NTREXBitextMining (hin_Deva-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 86.37956935403105
- type: f1
value: 82.83341679185445
- type: main_score
value: 82.83341679185445
- type: precision
value: 81.21563297326942
- type: recall
value: 86.37956935403105
task:
type: BitextMining
- dataset:
config: hun_Latn-arb_Arab
name: MTEB NTREXBitextMining (hun_Latn-arb_Arab)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 82.22333500250375
- type: f1
value: 77.76760378663232
- type: main_score
value: 77.76760378663232
- type: precision
value: 75.81634356296348
- type: recall
value: 82.22333500250375
task:
type: BitextMining
- dataset:
config: hun_Latn-ben_Beng
name: MTEB NTREXBitextMining (hun_Latn-ben_Beng)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 77.56634952428642
- type: f1
value: 72.28537250319926
- type: main_score
value: 72.28537250319926
- type: precision
value: 70.02032811121445
- type: recall
value: 77.56634952428642
task:
type: BitextMining
- dataset:
config: hun_Latn-deu_Latn
name: MTEB NTREXBitextMining (hun_Latn-deu_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 91.4371557336004
- type: f1
value: 89.27391086629945
- type: main_score
value: 89.27391086629945
- type: precision
value: 88.24904022700719
- type: recall
value: 91.4371557336004
task:
type: BitextMining
- dataset:
config: hun_Latn-ell_Grek
name: MTEB NTREXBitextMining (hun_Latn-ell_Grek)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 88.3825738607912
- type: f1
value: 85.36900588978705
- type: main_score
value: 85.36900588978705
- type: precision
value: 83.98848272408614
- type: recall
value: 88.3825738607912
task:
type: BitextMining
- dataset:
config: hun_Latn-eng_Latn
name: MTEB NTREXBitextMining (hun_Latn-eng_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 94.2914371557336
- type: f1
value: 92.68903355032549
- type: main_score
value: 92.68903355032549
- type: precision
value: 91.92121515606743
- type: recall
value: 94.2914371557336
task:
type: BitextMining
- dataset:
config: hun_Latn-fas_Arab
name: MTEB NTREXBitextMining (hun_Latn-fas_Arab)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 84.72709063595393
- type: f1
value: 80.81622433650475
- type: main_score
value: 80.81622433650475
- type: precision
value: 79.05524954097814
- type: recall
value: 84.72709063595393
task:
type: BitextMining
- dataset:
config: hun_Latn-fin_Latn
name: MTEB NTREXBitextMining (hun_Latn-fin_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 83.57536304456686
- type: f1
value: 79.32338984667477
- type: main_score
value: 79.32338984667477
- type: precision
value: 77.45833035267187
- type: recall
value: 83.57536304456686
task:
type: BitextMining
- dataset:
config: hun_Latn-fra_Latn
name: MTEB NTREXBitextMining (hun_Latn-fra_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 90.48572859288933
- type: f1
value: 87.94954336266304
- type: main_score
value: 87.94954336266304
- type: precision
value: 86.75429811383744
- type: recall
value: 90.48572859288933
task:
type: BitextMining
- dataset:
config: hun_Latn-heb_Hebr
name: MTEB NTREXBitextMining (hun_Latn-heb_Hebr)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 77.21582373560341
- type: f1
value: 71.82277384330463
- type: main_score
value: 71.82277384330463
- type: precision
value: 69.55856403653098
- type: recall
value: 77.21582373560341
task:
type: BitextMining
- dataset:
config: hun_Latn-hin_Deva
name: MTEB NTREXBitextMining (hun_Latn-hin_Deva)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 84.77716574862293
- type: f1
value: 80.97423913648251
- type: main_score
value: 80.97423913648251
- type: precision
value: 79.27265898848273
- type: recall
value: 84.77716574862293
task:
type: BitextMining
- dataset:
config: hun_Latn-ind_Latn
name: MTEB NTREXBitextMining (hun_Latn-ind_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 90.0350525788683
- type: f1
value: 87.28592889334
- type: main_score
value: 87.28592889334
- type: precision
value: 85.99732932732432
- type: recall
value: 90.0350525788683
task:
type: BitextMining
- dataset:
config: hun_Latn-jpn_Jpan
name: MTEB NTREXBitextMining (hun_Latn-jpn_Jpan)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 84.37656484727091
- type: f1
value: 80.59017097074182
- type: main_score
value: 80.59017097074182
- type: precision
value: 78.94508429310633
- type: recall
value: 84.37656484727091
task:
type: BitextMining
- dataset:
config: hun_Latn-kor_Hang
name: MTEB NTREXBitextMining (hun_Latn-kor_Hang)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 80.77115673510265
- type: f1
value: 76.35683684256543
- type: main_score
value: 76.35683684256543
- type: precision
value: 74.47361699114327
- type: recall
value: 80.77115673510265
task:
type: BitextMining
- dataset:
config: hun_Latn-lav_Latn
name: MTEB NTREXBitextMining (hun_Latn-lav_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 76.81522283425137
- type: f1
value: 71.24067052960392
- type: main_score
value: 71.24067052960392
- type: precision
value: 68.94003703968652
- type: recall
value: 76.81522283425137
task:
type: BitextMining
- dataset:
config: hun_Latn-lit_Latn
name: MTEB NTREXBitextMining (hun_Latn-lit_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 77.3159739609414
- type: f1
value: 71.92622266733433
- type: main_score
value: 71.92622266733433
- type: precision
value: 69.58461501776473
- type: recall
value: 77.3159739609414
task:
type: BitextMining
- dataset:
config: hun_Latn-nld_Latn
name: MTEB NTREXBitextMining (hun_Latn-nld_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 90.98647971957938
- type: f1
value: 88.5027541311968
- type: main_score
value: 88.5027541311968
- type: precision
value: 87.33683859122017
- type: recall
value: 90.98647971957938
task:
type: BitextMining
- dataset:
config: hun_Latn-pol_Latn
name: MTEB NTREXBitextMining (hun_Latn-pol_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 88.43264897346019
- type: f1
value: 85.33896082218565
- type: main_score
value: 85.33896082218565
- type: precision
value: 83.90919712902688
- type: recall
value: 88.43264897346019
task:
type: BitextMining
- dataset:
config: hun_Latn-por_Latn
name: MTEB NTREXBitextMining (hun_Latn-por_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 90.68602904356536
- type: f1
value: 88.09046903688868
- type: main_score
value: 88.09046903688868
- type: precision
value: 86.88449340677683
- type: recall
value: 90.68602904356536
task:
type: BitextMining
- dataset:
config: hun_Latn-rus_Cyrl
name: MTEB NTREXBitextMining (hun_Latn-rus_Cyrl)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 90.0350525788683
- type: f1
value: 87.35770322149892
- type: main_score
value: 87.35770322149892
- type: precision
value: 86.10832916040727
- type: recall
value: 90.0350525788683
task:
type: BitextMining
- dataset:
config: hun_Latn-spa_Latn
name: MTEB NTREXBitextMining (hun_Latn-spa_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 92.58888332498748
- type: f1
value: 90.64763812385245
- type: main_score
value: 90.64763812385245
- type: precision
value: 89.75880487397765
- type: recall
value: 92.58888332498748
task:
type: BitextMining
- dataset:
config: hun_Latn-swa_Latn
name: MTEB NTREXBitextMining (hun_Latn-swa_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 72.60891337005508
- type: f1
value: 66.62728580605396
- type: main_score
value: 66.62728580605396
- type: precision
value: 64.22842597229177
- type: recall
value: 72.60891337005508
task:
type: BitextMining
- dataset:
config: hun_Latn-swe_Latn
name: MTEB NTREXBitextMining (hun_Latn-swe_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 89.03355032548824
- type: f1
value: 86.01569020196962
- type: main_score
value: 86.01569020196962
- type: precision
value: 84.59105324653648
- type: recall
value: 89.03355032548824
task:
type: BitextMining
- dataset:
config: hun_Latn-tam_Taml
name: MTEB NTREXBitextMining (hun_Latn-tam_Taml)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 74.66199298948423
- type: f1
value: 68.7971639999682
- type: main_score
value: 68.7971639999682
- type: precision
value: 66.36091041323891
- type: recall
value: 74.66199298948423
task:
type: BitextMining
- dataset:
config: hun_Latn-tur_Latn
name: MTEB NTREXBitextMining (hun_Latn-tur_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 87.08062093139709
- type: f1
value: 83.79736271073277
- type: main_score
value: 83.79736271073277
- type: precision
value: 82.33278489162315
- type: recall
value: 87.08062093139709
task:
type: BitextMining
- dataset:
config: hun_Latn-vie_Latn
name: MTEB NTREXBitextMining (hun_Latn-vie_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 89.78467701552329
- type: f1
value: 87.0288766483058
- type: main_score
value: 87.0288766483058
- type: precision
value: 85.76781839425806
- type: recall
value: 89.78467701552329
task:
type: BitextMining
- dataset:
config: hun_Latn-zho_Hant
name: MTEB NTREXBitextMining (hun_Latn-zho_Hant)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 87.33099649474211
- type: f1
value: 84.02103154732097
- type: main_score
value: 84.02103154732097
- type: precision
value: 82.51877816725089
- type: recall
value: 87.33099649474211
task:
type: BitextMining
- dataset:
config: hun_Latn-zul_Latn
name: MTEB NTREXBitextMining (hun_Latn-zul_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 51.92789183775663
- type: f1
value: 43.912175926815536
- type: main_score
value: 43.912175926815536
- type: precision
value: 41.09881091478487
- type: recall
value: 51.92789183775663
task:
type: BitextMining
- dataset:
config: ind_Latn-hun_Latn
name: MTEB NTREXBitextMining (ind_Latn-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 90.1352028042063
- type: f1
value: 87.51722822328732
- type: main_score
value: 87.51722822328732
- type: precision
value: 86.31280253713905
- type: recall
value: 90.1352028042063
task:
type: BitextMining
- dataset:
config: jpn_Jpan-hun_Latn
name: MTEB NTREXBitextMining (jpn_Jpan-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 84.37656484727091
- type: f1
value: 80.56084126189283
- type: main_score
value: 80.56084126189283
- type: precision
value: 78.84743782340176
- type: recall
value: 84.37656484727091
task:
type: BitextMining
- dataset:
config: kor_Hang-hun_Latn
name: MTEB NTREXBitextMining (kor_Hang-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 83.47521281922884
- type: f1
value: 79.41519421990128
- type: main_score
value: 79.41519421990128
- type: precision
value: 77.57350311181057
- type: recall
value: 83.47521281922884
task:
type: BitextMining
- dataset:
config: lav_Latn-hun_Latn
name: MTEB NTREXBitextMining (lav_Latn-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 82.12318477716575
- type: f1
value: 78.18656556262967
- type: main_score
value: 78.18656556262967
- type: precision
value: 76.41879485895511
- type: recall
value: 82.12318477716575
task:
type: BitextMining
- dataset:
config: lit_Latn-hun_Latn
name: MTEB NTREXBitextMining (lit_Latn-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 81.67250876314472
- type: f1
value: 77.52628943415122
- type: main_score
value: 77.52628943415122
- type: precision
value: 75.62426973794024
- type: recall
value: 81.67250876314472
task:
type: BitextMining
- dataset:
config: nld_Latn-hun_Latn
name: MTEB NTREXBitextMining (nld_Latn-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 91.03655483224837
- type: f1
value: 88.62404718188392
- type: main_score
value: 88.62404718188392
- type: precision
value: 87.50584209647806
- type: recall
value: 91.03655483224837
task:
type: BitextMining
- dataset:
config: pol_Latn-hun_Latn
name: MTEB NTREXBitextMining (pol_Latn-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 88.73309964947421
- type: f1
value: 85.63869613944726
- type: main_score
value: 85.63869613944726
- type: precision
value: 84.21799365715239
- type: recall
value: 88.73309964947421
task:
type: BitextMining
- dataset:
config: por_Latn-hun_Latn
name: MTEB NTREXBitextMining (por_Latn-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 91.03655483224837
- type: f1
value: 88.54782173259889
- type: main_score
value: 88.54782173259889
- type: precision
value: 87.39108662994491
- type: recall
value: 91.03655483224837
task:
type: BitextMining
- dataset:
config: rus_Cyrl-hun_Latn
name: MTEB NTREXBitextMining (rus_Cyrl-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 88.88332498748123
- type: f1
value: 85.8447194601426
- type: main_score
value: 85.8447194601426
- type: precision
value: 84.45751961275246
- type: recall
value: 88.88332498748123
task:
type: BitextMining
- dataset:
config: spa_Latn-hun_Latn
name: MTEB NTREXBitextMining (spa_Latn-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 92.13820731096645
- type: f1
value: 89.933233183108
- type: main_score
value: 89.933233183108
- type: precision
value: 88.92004673677182
- type: recall
value: 92.13820731096645
task:
type: BitextMining
- dataset:
config: swa_Latn-hun_Latn
name: MTEB NTREXBitextMining (swa_Latn-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 75.7636454682023
- type: f1
value: 71.19297994610965
- type: main_score
value: 71.19297994610965
- type: precision
value: 69.29461652796655
- type: recall
value: 75.7636454682023
task:
type: BitextMining
- dataset:
config: swe_Latn-hun_Latn
name: MTEB NTREXBitextMining (swe_Latn-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 89.83475212819229
- type: f1
value: 87.25779144907837
- type: main_score
value: 87.25779144907837
- type: precision
value: 86.05408112168253
- type: recall
value: 89.83475212819229
task:
type: BitextMining
- dataset:
config: tam_Taml-hun_Latn
name: MTEB NTREXBitextMining (tam_Taml-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 78.01702553830746
- type: f1
value: 72.70886488462853
- type: main_score
value: 72.70886488462853
- type: precision
value: 70.39064549204758
- type: recall
value: 78.01702553830746
task:
type: BitextMining
- dataset:
config: tur_Latn-hun_Latn
name: MTEB NTREXBitextMining (tur_Latn-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 87.33099649474211
- type: f1
value: 84.28094522736485
- type: main_score
value: 84.28094522736485
- type: precision
value: 82.89100317142379
- type: recall
value: 87.33099649474211
task:
type: BitextMining
- dataset:
config: vie_Latn-hun_Latn
name: MTEB NTREXBitextMining (vie_Latn-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 89.23385077616425
- type: f1
value: 86.38290769487564
- type: main_score
value: 86.38290769487564
- type: precision
value: 85.08763144717074
- type: recall
value: 89.23385077616425
task:
type: BitextMining
- dataset:
config: zho_Hant-hun_Latn
name: MTEB NTREXBitextMining (zho_Hant-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 86.52979469203805
- type: f1
value: 82.964446670005
- type: main_score
value: 82.964446670005
- type: precision
value: 81.4104490068436
- type: recall
value: 86.52979469203805
task:
type: BitextMining
- dataset:
config: zul_Latn-hun_Latn
name: MTEB NTREXBitextMining (zul_Latn-hun_Latn)
revision: ed9a4403ed4adbfaf4aab56d5b2709e9f6c3ba33
split: test
type: mteb/NTREX
metrics:
- type: accuracy
value: 54.98247371056585
- type: f1
value: 48.79136275731169
- type: main_score
value: 48.79136275731169
- type: precision
value: 46.53637850035387
- type: recall
value: 54.98247371056585
task:
type: BitextMining
- dataset:
config: rom-hun
name: MTEB RomaTalesBitextMining (rom-hun)
revision: f4394dbca6845743cd33eba77431767b232ef489
split: test
type: kardosdrur/roma-tales
metrics:
- type: accuracy
value: 10.69767441860465
- type: f1
value: 6.300343882963222
- type: main_score
value: 6.300343882963222
- type: precision
value: 5.2912513842746405
- type: recall
value: 10.69767441860465
task:
type: BitextMining
- dataset:
config: hun_Latn
name: MTEB SIB200Classification (hun_Latn)
revision: a74d7350ea12af010cfb1c21e34f1f81fd2e615b
split: test
type: mteb/sib200
metrics:
- type: accuracy
value: 70.7843137254902
- type: f1
value: 69.54715341688494
- type: f1_weighted
value: 70.80982490835149
- type: main_score
value: 70.7843137254902
task:
type: Classification
- dataset:
config: hun_Latn
name: MTEB SIB200Classification (hun_Latn)
revision: a74d7350ea12af010cfb1c21e34f1f81fd2e615b
split: train
type: mteb/sib200
metrics:
- type: accuracy
value: 71.04136947218261
- type: f1
value: 69.53067958950989
- type: f1_weighted
value: 71.08855534234819
- type: main_score
value: 71.04136947218261
task:
type: Classification
- dataset:
config: hun_Latn
name: MTEB SIB200Classification (hun_Latn)
revision: a74d7350ea12af010cfb1c21e34f1f81fd2e615b
split: validation
type: mteb/sib200
metrics:
- type: accuracy
value: 67.77777777777779
- type: f1
value: 65.81682696212664
- type: f1_weighted
value: 68.15630936254685
- type: main_score
value: 67.77777777777779
task:
type: Classification
- dataset:
config: hun_Latn
name: MTEB SIB200ClusteringS2S (hun_Latn)
revision: a74d7350ea12af010cfb1c21e34f1f81fd2e615b
split: test
type: mteb/sib200
metrics:
- type: main_score
value: 37.555486757695725
- type: v_measure
value: 37.555486757695725
- type: v_measure_std
value: 5.704486435014278
task:
type: Clustering
- dataset:
config: hun-eng
name: MTEB Tatoeba (hun-eng)
revision: 69e8f12da6e31d59addadda9a9c8a2e601a0e282
split: test
type: mteb/tatoeba-bitext-mining
metrics:
- type: accuracy
value: 80.9
- type: f1
value: 76.77888888888889
- type: main_score
value: 76.77888888888889
- type: precision
value: 74.9825
- type: recall
value: 80.9
task:
type: BitextMining
tags:
- mteb
base_model: Alibaba-NLP/gte-multilingual-base
language:
- hu
library_name: sentence-transformers
license: apache-2.0
gte-multilingual-base-hu
This is a sentence-transformers model finetuned from Alibaba-NLP/gte-multilingual-base on the train dataset. It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.
Model Details
Model Description
- Model Type: Sentence Transformer
- Base model: Alibaba-NLP/gte-multilingual-base
- Maximum Sequence Length: 8192 tokens
- Output Dimensionality: 768 dimensions
- Similarity Function: Cosine Similarity
- Training Dataset:
- train
- Language: hu
- License: apache-2.0
Model Sources
- Documentation: Sentence Transformers Documentation
- Repository: Sentence Transformers on GitHub
- Hugging Face: Sentence Transformers on Hugging Face
Full Model Architecture
SentenceTransformer(
(0): Transformer({'max_seq_length': 8192, 'do_lower_case': False}) with Transformer model: NewModel
(1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
(2): Normalize()
)
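Because the pipeline ends with a Normalize() module on top of CLS pooling, every embedding has unit length, so cosine similarity and the dot product coincide. A minimal sketch of that check, assuming the model is loaded the same way as in the Usage section below:
import numpy as np
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("karsar/gte-multilingual-base-hu")
emb = model.encode(["Az emberek alszanak.", "Egy apa és a fia ölelgeti alvás közben."])

print(np.linalg.norm(emb, axis=1))  # ~[1. 1.] thanks to the Normalize() module
print(float(emb[0] @ emb[1]))       # dot product equals cosine similarity for unit vectors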
Usage
Direct Usage (Sentence Transformers)
First install the Sentence Transformers library:
pip install -U sentence-transformers
Then you can load this model and run inference.
from sentence_transformers import SentenceTransformer
# Download from the 🤗 Hub
model = SentenceTransformer("karsar/gte-multilingual-base-hu")
# Run inference
sentences = [
'Az emberek alszanak.',
'Egy apa és a fia ölelgeti alvás közben.',
'Egy csoport ember ül egy nyitott, térszerű területen, mögötte nagy bokrok és egy sor viktoriánus stílusú épület, melyek közül sokat a kép jobb oldalán lévő erős elmosódás tesz kivehetetlenné.',
]
embeddings = model.encode(sentences)
print(embeddings.shape)
# [3, 768]
# Get the similarity scores for the embeddings
similarities = model.similarity(embeddings, embeddings)
print(similarities.shape)
# [3, 3]
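Continuing from the snippet above, the same model can drive a small Hungarian semantic-search sketch; the query string here is a made-up example and not part of the training data:
# Hypothetical query; the corpus reuses the sentences encoded above.
query = "Ki alszik?"
corpus = sentences
query_emb = model.encode([query])
corpus_emb = model.encode(corpus)
scores = model.similarity(query_emb, corpus_emb)  # cosine similarities, shape [1, 3]
best = int(scores.argmax())
print(corpus[best], float(scores[0, best]))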
Evaluation
Metrics
Triplet
- Dataset:
all-nli-dev
- Evaluated with
TripletEvaluator
Metric | Value |
---|---|
cosine_accuracy | 0.9676 |
dot_accuracy | 0.0324 |
manhattan_accuracy | 0.9688 |
euclidean_accuracy | 0.9676 |
max_accuracy | 0.9688 |
Triplet
- Dataset:
all-nli-test
- Evaluated with
TripletEvaluator
Metric | Value |
---|---|
cosine_accuracy | 0.9718 |
dot_accuracy | 0.0282 |
manhattan_accuracy | 0.9726 |
euclidean_accuracy | 0.9718 |
max_accuracy | 0.9726 |
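The accuracies above come from Sentence Transformers' TripletEvaluator, which checks whether each anchor embedding lies closer to its positive than to its negative. A sketch of the same kind of evaluation, assuming you supply your own triples (the exact all-nli dev/test splits are not reproduced in this card); the triple below is borrowed from the training samples listed further down:
from sentence_transformers import SentenceTransformer
from sentence_transformers.evaluation import TripletEvaluator

model = SentenceTransformer("karsar/gte-multilingual-base-hu")
evaluator = TripletEvaluator(
    anchors=["Egy lóháton ülő ember átugrik egy lerombolt repülőgép felett."],
    positives=["Egy ember a szabadban, lóháton."],
    negatives=["Egy ember egy étteremben van, és omlettet rendel."],
    name="my-dev",  # hypothetical evaluator name
)
print(evaluator(model))  # dict of accuracies keyed by distance function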
Training Details
Training Dataset
train
- Dataset: train
- Size: 1,044,013 training samples
- Columns: anchor, positive, and negative
- Approximate statistics based on the first 1000 samples:
 | anchor | positive | negative |
---|---|---|---|
type | string | string | string |
details | min: 7 tokens, mean: 11.73 tokens, max: 56 tokens | min: 6 tokens, mean: 15.24 tokens, max: 47 tokens | min: 7 tokens, mean: 16.07 tokens, max: 53 tokens |
- Samples:
anchor | positive | negative |
---|---|---|
Egy lóháton ülő ember átugrik egy lerombolt repülőgép felett. | Egy ember a szabadban, lóháton. | Egy ember egy étteremben van, és omlettet rendel. |
Gyerekek mosolyogva és integetett a kamera | Gyermekek vannak jelen | A gyerekek homlokot rántanak |
Egy fiú ugrál a gördeszkát a közepén egy piros híd. | A fiú gördeszkás trükköt csinál. | A fiú korcsolyázik a járdán. |
- Loss: MultipleNegativesRankingLoss with these parameters: { "scale": 20.0, "similarity_fct": "cos_sim" }
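As a hedged sketch, a loss with the parameters listed above would be constructed roughly as follows (scale=20.0 and cos_sim are also the library defaults); trust_remote_code=True is assumed here because the base checkpoint ships custom NewModel code:
from sentence_transformers import SentenceTransformer, util
from sentence_transformers.losses import MultipleNegativesRankingLoss

model = SentenceTransformer("Alibaba-NLP/gte-multilingual-base", trust_remote_code=True)
loss = MultipleNegativesRankingLoss(model, scale=20.0, similarity_fct=util.cos_sim)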
Evaluation Dataset
train
- Dataset: train
- Size: 5,000 evaluation samples
- Columns: anchor, positive, and negative
- Approximate statistics based on the first 1000 samples:
 | anchor | positive | negative |
---|---|---|---|
type | string | string | string |
details | min: 7 tokens, mean: 11.73 tokens, max: 56 tokens | min: 6 tokens, mean: 15.24 tokens, max: 47 tokens | min: 7 tokens, mean: 16.07 tokens, max: 53 tokens |
- Samples:
anchor | positive | negative |
---|---|---|
Egy lóháton ülő ember átugrik egy lerombolt repülőgép felett. | Egy ember a szabadban, lóháton. | Egy ember egy étteremben van, és omlettet rendel. |
Gyerekek mosolyogva és integetett a kamera | Gyermekek vannak jelen | A gyerekek homlokot rántanak |
Egy fiú ugrál a gördeszkát a közepén egy piros híd. | A fiú gördeszkás trükköt csinál. | A fiú korcsolyázik a járdán. |
- Loss: MultipleNegativesRankingLoss with these parameters: { "scale": 20.0, "similarity_fct": "cos_sim" }
Training Hyperparameters
Non-Default Hyperparameters
- eval_strategy: steps
- num_train_epochs: 1
- warmup_ratio: 0.1
- bf16: True
- batch_sampler: no_duplicates
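A minimal training sketch wiring these non-default hyperparameters into a SentenceTransformerTrainer; the output directory and the single-row toy dataset are placeholders (the real run used the 1,044,013-sample train split and the 5,000-sample evaluation split described in the sections above):
from datasets import Dataset
from sentence_transformers import (
    SentenceTransformer,
    SentenceTransformerTrainer,
    SentenceTransformerTrainingArguments,
)
from sentence_transformers.losses import MultipleNegativesRankingLoss
from sentence_transformers.training_args import BatchSamplers

model = SentenceTransformer("Alibaba-NLP/gte-multilingual-base", trust_remote_code=True)
loss = MultipleNegativesRankingLoss(model)  # scale=20.0 and cos_sim are the defaults

# Toy anchor/positive/negative data borrowed from the samples in the Training Dataset section.
toy = Dataset.from_dict({
    "anchor": ["Egy lóháton ülő ember átugrik egy lerombolt repülőgép felett."],
    "positive": ["Egy ember a szabadban, lóháton."],
    "negative": ["Egy ember egy étteremben van, és omlettet rendel."],
})

args = SentenceTransformerTrainingArguments(
    output_dir="outputs/gte-multilingual-base-hu",  # placeholder path
    num_train_epochs=1,
    per_device_train_batch_size=8,
    warmup_ratio=0.1,
    bf16=True,                                      # matches the run above; needs bf16-capable hardware
    eval_strategy="steps",
    batch_sampler=BatchSamplers.NO_DUPLICATES,
)
trainer = SentenceTransformerTrainer(
    model=model, args=args, train_dataset=toy, eval_dataset=toy, loss=loss
)
# trainer.train()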
All Hyperparameters
- overwrite_output_dir: False
- do_predict: False
- eval_strategy: steps
- prediction_loss_only: True
- per_device_train_batch_size: 8
- per_device_eval_batch_size: 8
- per_gpu_train_batch_size: None
- per_gpu_eval_batch_size: None
- gradient_accumulation_steps: 1
- eval_accumulation_steps: None
- torch_empty_cache_steps: None
- learning_rate: 5e-05
- weight_decay: 0.0
- adam_beta1: 0.9
- adam_beta2: 0.999
- adam_epsilon: 1e-08
- max_grad_norm: 1.0
- num_train_epochs: 1
- max_steps: -1
- lr_scheduler_type: linear
- lr_scheduler_kwargs: {}
- warmup_ratio: 0.1
- warmup_steps: 0
- log_level: passive
- log_level_replica: warning
- log_on_each_node: True
- logging_nan_inf_filter: True
- save_safetensors: True
- save_on_each_node: False
- save_only_model: False
- restore_callback_states_from_checkpoint: False
- no_cuda: False
- use_cpu: False
- use_mps_device: False
- seed: 42
- data_seed: None
- jit_mode_eval: False
- use_ipex: False
- bf16: True
- fp16: False
- fp16_opt_level: O1
- half_precision_backend: auto
- bf16_full_eval: False
- fp16_full_eval: False
- tf32: None
- local_rank: 0
- ddp_backend: None
- tpu_num_cores: None
- tpu_metrics_debug: False
- debug: []
- dataloader_drop_last: False
- dataloader_num_workers: 0
- dataloader_prefetch_factor: None
- past_index: -1
- disable_tqdm: False
- remove_unused_columns: True
- label_names: None
- load_best_model_at_end: False
- ignore_data_skip: False
- fsdp: []
- fsdp_min_num_params: 0
- fsdp_config: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
- fsdp_transformer_layer_cls_to_wrap: None
- accelerator_config: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
- deepspeed: None
- label_smoothing_factor: 0.0
- optim: adamw_torch
- optim_args: None
- adafactor: False
- group_by_length: False
- length_column_name: length
- ddp_find_unused_parameters: None
- ddp_bucket_cap_mb: None
- ddp_broadcast_buffers: False
- dataloader_pin_memory: True
- dataloader_persistent_workers: False
- skip_memory_metrics: True
- use_legacy_prediction_loop: False
- push_to_hub: False
- resume_from_checkpoint: None
- hub_model_id: None
- hub_strategy: every_save
- hub_private_repo: False
- hub_always_push: False
- gradient_checkpointing: False
- gradient_checkpointing_kwargs: None
- include_inputs_for_metrics: False
- eval_do_concat_batches: True
- fp16_backend: auto
- push_to_hub_model_id: None
- push_to_hub_organization: None
- mp_parameters:
- auto_find_batch_size: False
- full_determinism: False
- torchdynamo: None
- ray_scope: last
- ddp_timeout: 1800
- torch_compile: False
- torch_compile_backend: None
- torch_compile_mode: None
- dispatch_batches: None
- split_batches: None
- include_tokens_per_second: False
- include_num_input_tokens_seen: False
- neftune_noise_alpha: None
- optim_target_modules: None
- batch_eval_metrics: False
- eval_on_start: False
- eval_use_gather_object: False
- batch_sampler: no_duplicates
- multi_dataset_batch_sampler: proportional
Training Logs
Epoch | Step | Training Loss | train loss | all-nli-dev_max_accuracy | all-nli-test_max_accuracy |
---|---|---|---|---|---|
0 | 0 | - | - | 0.7578 | - |
0.0008 | 100 | 0.8531 | - | - | - |
0.0015 | 200 | 0.938 | - | - | - |
0.0023 | 300 | 0.8788 | - | - | - |
0.0031 | 400 | 0.9619 | - | - | - |
0.0038 | 500 | 0.9634 | - | - | - |
0.0046 | 600 | 1.0995 | - | - | - |
0.0054 | 700 | 0.8266 | - | - | - |
0.0061 | 800 | 0.8647 | - | - | - |
0.0069 | 900 | 0.8123 | - | - | - |
0.0077 | 1000 | 0.7149 | - | - | - |
0.0084 | 1100 | 0.8852 | - | - | - |
0.0092 | 1200 | 0.9001 | - | - | - |
0.0100 | 1300 | 0.8113 | - | - | - |
0.0107 | 1400 | 0.756 | - | - | - |
0.0115 | 1500 | 0.6729 | - | - | - |
0.0123 | 1600 | 0.5763 | - | - | - |
0.0130 | 1700 | 0.6413 | - | - | - |
0.0138 | 1800 | 1.0721 | - | - | - |
0.0146 | 1900 | 0.9248 | - | - | - |
0.0153 | 2000 | 0.9313 | 0.1873 | 0.9518 | - |
0.0161 | 2100 | 0.8165 | - | - | - |
0.0169 | 2200 | 0.7051 | - | - | - |
0.0176 | 2300 | 0.8373 | - | - | - |
0.0184 | 2400 | 0.8337 | - | - | - |
0.0192 | 2500 | 0.6224 | - | - | - |
0.0199 | 2600 | 0.4977 | - | - | - |
0.0207 | 2700 | 0.6843 | - | - | - |
0.0215 | 2800 | 0.4773 | - | - | - |
0.0222 | 2900 | 0.5113 | - | - | - |
0.0230 | 3000 | 0.2415 | - | - | - |
0.0238 | 3100 | 0.2441 | - | - | - |
0.0245 | 3200 | 0.3309 | - | - | - |
0.0253 | 3300 | 0.4765 | - | - | - |
0.0261 | 3400 | 0.4781 | - | - | - |
0.0268 | 3500 | 0.2184 | - | - | - |
0.0276 | 3600 | 0.3596 | - | - | - |
0.0284 | 3700 | 0.655 | - | - | - |
0.0291 | 3800 | 0.6108 | - | - | - |
0.0299 | 3900 | 0.4897 | - | - | - |
0.0307 | 4000 | 0.3217 | 0.3629 | 0.9146 | - |
0.0314 | 4100 | 0.2678 | - | - | - |
0.0322 | 4200 | 0.4772 | - | - | - |
0.0329 | 4300 | 0.46 | - | - | - |
0.0337 | 4400 | 0.3363 | - | - | - |
0.0345 | 4500 | 0.2244 | - | - | - |
0.0352 | 4600 | 0.2708 | - | - | - |
0.0360 | 4700 | 0.288 | - | - | - |
0.0368 | 4800 | 0.4095 | - | - | - |
0.0375 | 4900 | 0.3836 | - | - | - |
0.0383 | 5000 | 0.3999 | - | - | - |
0.0391 | 5100 | 0.2303 | - | - | - |
0.0398 | 5200 | 0.232 | - | - | - |
0.0406 | 5300 | 0.2001 | - | - | - |
0.0414 | 5400 | 0.2552 | - | - | - |
0.0421 | 5500 | 0.2658 | - | - | - |
0.0429 | 5600 | 0.3652 | - | - | - |
0.0437 | 5700 | 0.6644 | - | - | - |
0.0444 | 5800 | 0.4616 | - | - | - |
0.0452 | 5900 | 0.459 | - | - | - |
0.0460 | 6000 | 0.4053 | 0.6328 | 0.8806 | - |
0.0467 | 6100 | 0.3715 | - | - | - |
0.0475 | 6200 | 0.5301 | - | - | - |
0.0483 | 6300 | 0.4412 | - | - | - |
0.0490 | 6400 | 0.3733 | - | - | - |
0.0498 | 6500 | 0.4258 | - | - | - |
0.0506 | 6600 | 0.4896 | - | - | - |
0.0513 | 6700 | 0.4275 | - | - | - |
0.0521 | 6800 | 0.4419 | - | - | - |
0.0529 | 6900 | 0.4671 | - | - | - |
0.0536 | 7000 | 0.4209 | - | - | - |
0.0544 | 7100 | 0.406 | - | - | - |
0.0552 | 7200 | 0.3265 | - | - | - |
0.0559 | 7300 | 0.2712 | - | - | - |
0.0567 | 7400 | 0.3408 | - | - | - |
0.0575 | 7500 | 0.4078 | - | - | - |
0.0582 | 7600 | 0.3304 | - | - | - |
0.0590 | 7700 | 0.2874 | - | - | - |
0.0598 | 7800 | 0.357 | - | - | - |
0.0605 | 7900 | 0.3936 | - | - | - |
0.0613 | 8000 | 0.3239 | 0.5266 | 0.8706 | - |
0.0621 | 8100 | 0.3486 | - | - | - |
0.0628 | 8200 | 0.4123 | - | - | - |
0.0636 | 8300 | 0.7267 | - | - | - |
0.0644 | 8400 | 0.6765 | - | - | - |
0.0651 | 8500 | 0.7502 | - | - | - |
0.0659 | 8600 | 0.8435 | - | - | - |
0.0667 | 8700 | 0.4286 | - | - | - |
0.0674 | 8800 | 0.2898 | - | - | - |
0.0682 | 8900 | 0.4943 | - | - | - |
0.0690 | 9000 | 0.3998 | - | - | - |
0.0697 | 9100 | 0.4484 | - | - | - |
0.0705 | 9200 | 0.4421 | - | - | - |
0.0713 | 9300 | 0.3331 | - | - | - |
0.0720 | 9400 | 0.3354 | - | - | - |
0.0728 | 9500 | 0.5536 | - | - | - |
0.0736 | 9600 | 0.4695 | - | - | - |
0.0743 | 9700 | 0.4275 | - | - | - |
0.0751 | 9800 | 0.4075 | - | - | - |
0.0759 | 9900 | 0.5394 | - | - | - |
0.0766 | 10000 | 0.4852 | 0.4733 | 0.9202 | - |
0.0774 | 10100 | 0.3679 | - | - | - |
0.0782 | 10200 | 0.4251 | - | - | - |
0.0789 | 10300 | 0.262 | - | - | - |
0.0797 | 10400 | 0.384 | - | - | - |
0.0805 | 10500 | 0.3438 | - | - | - |
0.0812 | 10600 | 0.3618 | - | - | - |
0.0820 | 10700 | 0.4057 | - | - | - |
0.0828 | 10800 | 0.5303 | - | - | - |
0.0835 | 10900 | 0.5121 | - | - | - |
0.0843 | 11000 | 0.4173 | - | - | - |
0.0851 | 11100 | 0.409 | - | - | - |
0.0858 | 11200 | 0.6285 | - | - | - |
0.0866 | 11300 | 0.5373 | - | - | - |
0.0874 | 11400 | 0.3423 | - | - | - |
0.0881 | 11500 | 0.5681 | - | - | - |
0.0889 | 11600 | 0.4172 | - | - | - |
0.0897 | 11700 | 0.5511 | - | - | - |
0.0904 | 11800 | 0.4482 | - | - | - |
0.0912 | 11900 | 0.5888 | - | - | - |
0.0920 | 12000 | 0.4315 | 0.8177 | 0.8496 | - |
0.0927 | 12100 | 0.5085 | - | - | - |
0.0935 | 12200 | 0.7179 | - | - | - |
0.0943 | 12300 | 0.72 | - | - | - |
0.0950 | 12400 | 0.4522 | - | - | - |
0.0958 | 12500 | 0.6524 | - | - | - |
0.0966 | 12600 | 0.5518 | - | - | - |
0.0973 | 12700 | 0.5112 | - | - | - |
0.0981 | 12800 | 0.4752 | - | - | - |
0.0988 | 12900 | 0.4075 | - | - | - |
0.0996 | 13000 | 0.7106 | - | - | - |
0.1004 | 13100 | 0.7369 | - | - | - |
0.1011 | 13200 | 0.6002 | - | - | - |
0.1019 | 13300 | 0.3983 | - | - | - |
0.1027 | 13400 | 0.4522 | - | - | - |
0.1034 | 13500 | 0.5373 | - | - | - |
0.1042 | 13600 | 0.6317 | - | - | - |
0.1050 | 13700 | 0.4904 | - | - | - |
0.1057 | 13800 | 0.5027 | - | - | - |
0.1065 | 13900 | 0.4386 | - | - | - |
0.1073 | 14000 | 0.571 | 0.4533 | 0.9182 | - |
0.1080 | 14100 | 0.4935 | - | - | - |
0.1088 | 14200 | 0.494 | - | - | - |
0.1096 | 14300 | 0.7545 | - | - | - |
0.1103 | 14400 | 0.64 | - | - | - |
0.1111 | 14500 | 0.7364 | - | - | - |
0.1119 | 14600 | 0.5552 | - | - | - |
0.1126 | 14700 | 0.4282 | - | - | - |
0.1134 | 14800 | 0.8343 | - | - | - |
0.1142 | 14900 | 0.5264 | - | - | - |
0.1149 | 15000 | 0.2682 | - | - | - |
0.1157 | 15100 | 0.4011 | - | - | - |
0.1165 | 15200 | 0.39 | - | - | - |
0.1172 | 15300 | 0.2813 | - | - | - |
0.1180 | 15400 | 0.3316 | - | - | - |
0.1188 | 15500 | 0.2424 | - | - | - |
0.1195 | 15600 | 0.3001 | - | - | - |
0.1203 | 15700 | 0.2728 | - | - | - |
0.1211 | 15800 | 0.366 | - | - | - |
0.1218 | 15900 | 0.4103 | - | - | - |
0.1226 | 16000 | 0.1548 | 0.7131 | 0.876 | - |
0.1234 | 16100 | 0.3768 | - | - | - |
0.1241 | 16200 | 0.6659 | - | - | - |
0.1249 | 16300 | 0.5738 | - | - | - |
0.1257 | 16400 | 0.4899 | - | - | - |
0.1264 | 16500 | 0.2281 | - | - | - |
0.1272 | 16600 | 0.2406 | - | - | - |
0.1280 | 16700 | 0.3569 | - | - | - |
0.1287 | 16800 | 0.3862 | - | - | - |
0.1295 | 16900 | 0.3531 | - | - | - |
0.1303 | 17000 | 0.1497 | - | - | - |
0.1310 | 17100 | 0.2125 | - | - | - |
0.1318 | 17200 | 0.3563 | - | - | - |
0.1326 | 17300 | 0.4138 | - | - | - |
0.1333 | 17400 | 0.3434 | - | - | - |
0.1341 | 17500 | 0.351 | - | - | - |
0.1349 | 17600 | 0.1777 | - | - | - |
0.1356 | 17700 | 0.2335 | - | - | - |
0.1364 | 17800 | 0.1479 | - | - | - |
0.1372 | 17900 | 0.2382 | - | - | - |
0.1379 | 18000 | 0.2306 | 0.5838 | 0.898 | - |
0.1387 | 18100 | 0.3028 | - | - | - |
0.1395 | 18200 | 0.6886 | - | - | - |
0.1402 | 18300 | 0.4118 | - | - | - |
0.1410 | 18400 | 0.463 | - | - | - |
0.1418 | 18500 | 0.3672 | - | - | - |
0.1425 | 18600 | 0.2931 | - | - | - |
0.1433 | 18700 | 0.4141 | - | - | - |
0.1441 | 18800 | 0.3775 | - | - | - |
0.1448 | 18900 | 0.297 | - | - | - |
0.1456 | 19000 | 0.3659 | - | - | - |
0.1464 | 19100 | 0.4638 | - | - | - |
0.1471 | 19200 | 0.4008 | - | - | - |
0.1479 | 19300 | 0.344 | - | - | - |
0.1487 | 19400 | 0.3902 | - | - | - |
0.1494 | 19500 | 0.3392 | - | - | - |
0.1502 | 19600 | 0.4313 | - | - | - |
0.1510 | 19700 | 0.2827 | - | - | - |
0.1517 | 19800 | 0.2602 | - | - | - |
0.1525 | 19900 | 0.2954 | - | - | - |
0.1533 | 20000 | 0.3626 | 0.3532 | 0.9126 | - |
0.1540 | 20100 | 0.3205 | - | - | - |
0.1548 | 20200 | 0.2095 | - | - | - |
0.1556 | 20300 | 0.2758 | - | - | - |
0.1563 | 20400 | 0.3855 | - | - | - |
0.1571 | 20500 | 0.3173 | - | - | - |
0.1579 | 20600 | 0.2858 | - | - | - |
0.1586 | 20700 | 0.3655 | - | - | - |
0.1594 | 20800 | 0.5513 | - | - | - |
0.1602 | 20900 | 0.4995 | - | - | - |
0.1609 | 21000 | 0.5949 | - | - | - |
0.1617 | 21100 | 0.7629 | - | - | - |
0.1624 | 21200 | 0.3139 | - | - | - |
0.1632 | 21300 | 0.1827 | - | - | - |
0.1640 | 21400 | 0.4238 | - | - | - |
0.1647 | 21500 | 0.311 | - | - | - |
0.1655 | 21600 | 0.3881 | - | - | - |
0.1663 | 21700 | 0.4073 | - | - | - |
0.1670 | 21800 | 0.2609 | - | - | - |
0.1678 | 21900 | 0.2442 | - | - | - |
0.1686 | 22000 | 0.4434 | 0.3622 | 0.9238 | - |
0.1693 | 22100 | 0.3899 | - | - | - |
0.1701 | 22200 | 0.3822 | - | - | - |
0.1709 | 22300 | 0.2892 | - | - | - |
0.1716 | 22400 | 0.4078 | - | - | - |
0.1724 | 22500 | 0.3758 | - | - | - |
0.1732 | 22600 | 0.2714 | - | - | - |
0.1739 | 22700 | 0.304 | - | - | - |
0.1747 | 22800 | 0.2074 | - | - | - |
0.1755 | 22900 | 0.2447 | - | - | - |
0.1762 | 23000 | 0.2148 | - | - | - |
0.1770 | 23100 | 0.2565 | - | - | - |
0.1778 | 23200 | 0.3164 | - | - | - |
0.1785 | 23300 | 0.4486 | - | - | - |
0.1793 | 23400 | 0.4001 | - | - | - |
0.1801 | 23500 | 0.3374 | - | - | - |
0.1808 | 23600 | 0.2576 | - | - | - |
0.1816 | 23700 | 0.4531 | - | - | - |
0.1824 | 23800 | 0.3501 | - | - | - |
0.1831 | 23900 | 0.2755 | - | - | - |
0.1839 | 24000 | 0.4571 | 0.5006 | 0.9296 | - |
0.1847 | 24100 | 0.3371 | - | - | - |
0.1854 | 24200 | 0.4287 | - | - | - |
0.1862 | 24300 | 0.3217 | - | - | - |
0.1870 | 24400 | 0.3464 | - | - | - |
0.1877 | 24500 | 0.3257 | - | - | - |
0.1885 | 24600 | 0.3412 | - | - | - |
0.1893 | 24700 | 0.569 | - | - | - |
0.1900 | 24800 | 0.4851 | - | - | - |
0.1908 | 24900 | 0.2667 | - | - | - |
0.1916 | 25000 | 0.5093 | - | - | - |
0.1923 | 25100 | 0.3305 | - | - | - |
0.1931 | 25200 | 0.3199 | - | - | - |
0.1939 | 25300 | 0.3103 | - | - | - |
0.1946 | 25400 | 0.3189 | - | - | - |
0.1954 | 25500 | 0.6199 | - | - | - |
0.1962 | 25600 | 0.6001 | - | - | - |
0.1969 | 25700 | 0.416 | - | - | - |
0.1977 | 25800 | 0.2765 | - | - | - |
0.1985 | 25900 | 0.3523 | - | - | - |
0.1992 | 26000 | 0.4098 | 0.3070 | 0.961 | - |
0.2000 | 26100 | 0.3526 | - | - | - |
0.2008 | 26200 | 0.3409 | - | - | - |
0.2015 | 26300 | 0.2826 | - | - | - |
0.2023 | 26400 | 0.3161 | - | - | - |
0.2031 | 26500 | 0.3768 | - | - | - |
0.2038 | 26600 | 0.2398 | - | - | - |
0.2046 | 26700 | 0.3281 | - | - | - |
0.2054 | 26800 | 0.5103 | - | - | - |
0.2061 | 26900 | 0.3619 | - | - | - |
0.2069 | 27000 | 0.4818 | - | - | - |
0.2077 | 27100 | 0.3793 | - | - | - |
0.2084 | 27200 | 0.3713 | - | - | - |
0.2092 | 27300 | 0.5628 | - | - | - |
0.2100 | 27400 | 0.4162 | - | - | - |
0.2107 | 27500 | 0.1791 | - | - | - |
0.2115 | 27600 | 0.2212 | - | - | - |
0.2123 | 27700 | 0.227 | - | - | - |
0.2130 | 27800 | 0.1547 | - | - | - |
0.2138 | 27900 | 0.1532 | - | - | - |
0.2146 | 28000 | 0.1684 | 0.2016 | 0.9732 | - |
0.2153 | 28100 | 0.1512 | - | - | - |
0.2161 | 28200 | 0.1525 | - | - | - |
0.2169 | 28300 | 0.2272 | - | - | - |
0.2176 | 28400 | 0.3624 | - | - | - |
0.2184 | 28500 | 0.1039 | - | - | - |
0.2192 | 28600 | 0.2833 | - | - | - |
0.2199 | 28700 | 0.5507 | - | - | - |
0.2207 | 28800 | 0.3969 | - | - | - |
0.2215 | 28900 | 0.3477 | - | - | - |
0.2222 | 29000 | 0.135 | - | - | - |
0.2230 | 29100 | 0.1454 | - | - | - |
0.2238 | 29200 | 0.2475 | - | - | - |
0.2245 | 29300 | 0.2538 | - | - | - |
0.2253 | 29400 | 0.2197 | - | - | - |
0.2261 | 29500 | 0.057 | - | - | - |
0.2268 | 29600 | 0.1312 | - | - | - |
0.2276 | 29700 | 0.213 | - | - | - |
0.2283 | 29800 | 0.3195 | - | - | - |
0.2291 | 29900 | 0.2358 | - | - | - |
0.2299 | 30000 | 0.273 | 0.2934 | 0.9392 | - |
0.2306 | 30100 | 0.1181 | - | - | - |
0.2314 | 30200 | 0.1874 | - | - | - |
0.2322 | 30300 | 0.0743 | - | - | - |
0.2329 | 30400 | 0.1617 | - | - | - |
0.2337 | 30500 | 0.1573 | - | - | - |
0.2345 | 30600 | 0.141 | - | - | - |
0.2352 | 30700 | 0.4947 | - | - | - |
0.2360 | 30800 | 0.2698 | - | - | - |
0.2368 | 30900 | 0.2668 | - | - | - |
0.2375 | 31000 | 0.1834 | - | - | - |
0.2383 | 31100 | 0.1813 | - | - | - |
0.2391 | 31200 | 0.2274 | - | - | - |
0.2398 | 31300 | 0.2553 | - | - | - |
0.2406 | 31400 | 0.2441 | - | - | - |
0.2414 | 31500 | 0.2376 | - | - | - |
0.2421 | 31600 | 0.366 | - | - | - |
0.2429 | 31700 | 0.3248 | - | - | - |
0.2437 | 31800 | 0.2314 | - | - | - |
0.2444 | 31900 | 0.2665 | - | - | - |
0.2452 | 32000 | 0.2388 | 0.1915 | 0.9654 | - |
0.2460 | 32100 | 0.2911 | - | - | - |
0.2467 | 32200 | 0.1602 | - | - | - |
0.2475 | 32300 | 0.1294 | - | - | - |
0.2483 | 32400 | 0.2687 | - | - | - |
0.2490 | 32500 | 0.2579 | - | - | - |
0.2498 | 32600 | 0.1988 | - | - | - |
0.2506 | 32700 | 0.1212 | - | - | - |
0.2513 | 32800 | 0.2145 | - | - | - |
0.2521 | 32900 | 0.2485 | - | - | - |
0.2529 | 33000 | 0.2353 | - | - | - |
0.2536 | 33100 | 0.1729 | - | - | - |
0.2544 | 33200 | 0.2498 | - | - | - |
0.2552 | 33300 | 0.3091 | - | - | - |
0.2559 | 33400 | 0.252 | - | - | - |
0.2567 | 33500 | 0.3321 | - | - | - |
0.2575 | 33600 | 0.5145 | - | - | - |
0.2582 | 33700 | 0.2102 | - | - | - |
0.2590 | 33800 | 0.0869 | - | - | - |
0.2598 | 33900 | 0.2779 | - | - | - |
0.2605 | 34000 | 0.1935 | 0.1556 | 0.9716 | - |
0.2613 | 34100 | 0.2646 | - | - | - |
0.2621 | 34200 | 0.2464 | - | - | - |
0.2628 | 34300 | 0.214 | - | - | - |
0.2636 | 34400 | 0.1875 | - | - | - |
0.2644 | 34500 | 0.3016 | - | - | - |
0.2651 | 34600 | 0.2721 | - | - | - |
0.2659 | 34700 | 0.215 | - | - | - |
0.2667 | 34800 | 0.1895 | - | - | - |
0.2674 | 34900 | 0.2684 | - | - | - |
0.2682 | 35000 | 0.2721 | - | - | - |
0.2690 | 35100 | 0.1945 | - | - | - |
0.2697 | 35200 | 0.1581 | - | - | - |
0.2705 | 35300 | 0.1269 | - | - | - |
0.2713 | 35400 | 0.2101 | - | - | - |
0.2720 | 35500 | 0.1388 | - | - | - |
0.2728 | 35600 | 0.1664 | - | - | - |
0.2736 | 35700 | 0.1861 | - | - | - |
0.2743 | 35800 | 0.3073 | - | - | - |
0.2751 | 35900 | 0.2723 | - | - | - |
0.2759 | 36000 | 0.2002 | 0.1500 | 0.9746 | - |
0.2766 | 36100 | 0.1583 | - | - | - |
0.2774 | 36200 | 0.2918 | - | - | - |
0.2782 | 36300 | 0.1913 | - | - | - |
0.2789 | 36400 | 0.1701 | - | - | - |
0.2797 | 36500 | 0.3122 | - | - | - |
0.2805 | 36600 | 0.2068 | - | - | - |
0.2812 | 36700 | 0.2807 | - | - | - |
0.2820 | 36800 | 0.2398 | - | - | - |
0.2828 | 36900 | 0.2264 | - | - | - |
0.2835 | 37000 | 0.1756 | - | - | - |
0.2843 | 37100 | 0.2027 | - | - | - |
0.2851 | 37200 | 0.4277 | - | - | - |
0.2858 | 37300 | 0.3126 | - | - | - |
0.2866 | 37400 | 0.1836 | - | - | - |
0.2874 | 37500 | 0.3447 | - | - | - |
0.2881 | 37600 | 0.1742 | - | - | - |
0.2889 | 37700 | 0.2391 | - | - | - |
0.2897 | 37800 | 0.1672 | - | - | - |
0.2904 | 37900 | 0.2821 | - | - | - |
0.2912 | 38000 | 0.3924 | 0.2273 | 0.9704 | - |
0.2919 | 38100 | 0.3842 | - | - | - |
0.2927 | 38200 | 0.3022 | - | - | - |
0.2935 | 38300 | 0.0748 | - | - | - |
0.2942 | 38400 | 0.2131 | - | - | - |
0.2950 | 38500 | 0.1604 | - | - | - |
0.2958 | 38600 | 0.1645 | - | - | - |
0.2965 | 38700 | 0.1753 | - | - | - |
0.2973 | 38800 | 0.0634 | - | - | - |
0.2981 | 38900 | 0.1199 | - | - | - |
0.2988 | 39000 | 0.1586 | - | - | - |
0.2996 | 39100 | 0.1119 | - | - | - |
0.3004 | 39200 | 0.106 | - | - | - |
0.3011 | 39300 | 0.2754 | - | - | - |
0.3019 | 39400 | 0.2172 | - | - | - |
0.3027 | 39500 | 0.2081 | - | - | - |
0.3034 | 39600 | 0.1237 | - | - | - |
0.3042 | 39700 | 0.1699 | - | - | - |
0.3050 | 39800 | 0.3101 | - | - | - |
0.3057 | 39900 | 0.2217 | - | - | - |
0.3065 | 40000 | 0.0641 | 0.1541 | 0.9764 | - |
0.3073 | 40100 | 0.1466 | - | - | - |
0.3080 | 40200 | 0.1468 | - | - | - |
0.3088 | 40300 | 0.0891 | - | - | - |
0.3096 | 40400 | 0.0694 | - | - | - |
0.3103 | 40500 | 0.0993 | - | - | - |
0.3111 | 40600 | 0.0895 | - | - | - |
0.3119 | 40700 | 0.1036 | - | - | - |
0.3126 | 40800 | 0.1358 | - | - | - |
0.3134 | 40900 | 0.1809 | - | - | - |
0.3142 | 41000 | 0.0739 | - | - | - |
0.3149 | 41100 | 0.1942 | - | - | - |
0.3157 | 41200 | 0.5035 | - | - | - |
0.3165 | 41300 | 0.1967 | - | - | - |
0.3172 | 41400 | 0.2337 | - | - | - |
0.3180 | 41500 | 0.0589 | - | - | - |
0.3188 | 41600 | 0.0559 | - | - | - |
0.3195 | 41700 | 0.1349 | - | - | - |
0.3203 | 41800 | 0.1641 | - | - | - |
0.3211 | 41900 | 0.1014 | - | - | - |
0.3218 | 42000 | 0.0307 | 0.1494 | 0.9808 | - |
0.3226 | 42100 | 0.0804 | - | - | - |
0.3234 | 42200 | 0.1525 | - | - | - |
0.3241 | 42300 | 0.217 | - | - | - |
0.3249 | 42400 | 0.1217 | - | - | - |
0.3257 | 42500 | 0.1793 | - | - | - |
0.3264 | 42600 | 0.0749 | - | - | - |
0.3272 | 42700 | 0.1164 | - | - | - |
0.3280 | 42800 | 0.0354 | - | - | - |
0.3287 | 42900 | 0.0907 | - | - | - |
0.3295 | 43000 | 0.0859 | - | - | - |
0.3303 | 43100 | 0.0452 | - | - | - |
0.3310 | 43200 | 0.2408 | - | - | - |
0.3318 | 43300 | 0.1326 | - | - | - |
0.3326 | 43400 | 0.1982 | - | - | - |
0.3333 | 43500 | 0.0987 | - | - | - |
0.3341 | 43600 | 0.1097 | - | - | - |
0.3349 | 43700 | 0.1461 | - | - | - |
0.3356 | 43800 | 0.1902 | - | - | - |
0.3364 | 43900 | 0.1091 | - | - | - |
0.3372 | 44000 | 0.1655 | 0.2016 | 0.9634 | - |
0.3379 | 44100 | 0.2503 | - | - | - |
0.3387 | 44200 | 0.2033 | - | - | - |
0.3395 | 44300 | 0.1312 | - | - | - |
0.3402 | 44400 | 0.175 | - | - | - |
0.3410 | 44500 | 0.1357 | - | - | - |
0.3418 | 44600 | 0.1589 | - | - | - |
0.3425 | 44700 | 0.1093 | - | - | - |
0.3433 | 44800 | 0.0593 | - | - | - |
0.3441 | 44900 | 0.14 | - | - | - |
0.3448 | 45000 | 0.1669 | - | - | - |
0.3456 | 45100 | 0.0919 | - | - | - |
0.3464 | 45200 | 0.0479 | - | - | - |
0.3471 | 45300 | 0.1151 | - | - | - |
0.3479 | 45400 | 0.1353 | - | - | - |
0.3487 | 45500 | 0.1457 | - | - | - |
0.3494 | 45600 | 0.0952 | - | - | - |
0.3502 | 45700 | 0.149 | - | - | - |
0.3510 | 45800 | 0.1253 | - | - | - |
0.3517 | 45900 | 0.1249 | - | - | - |
0.3525 | 46000 | 0.1592 | 0.1187 | 0.98 | - |
0.3533 | 46100 | 0.3452 | - | - | - |
0.3540 | 46200 | 0.1351 | - | - | - |
0.3548 | 46300 | 0.0551 | - | - | - |
0.3556 | 46400 | 0.1676 | - | - | - |
0.3563 | 46500 | 0.1227 | - | - | - |
0.3571 | 46600 | 0.1381 | - | - | - |
0.3578 | 46700 | 0.177 | - | - | - |
0.3586 | 46800 | 0.1239 | - | - | - |
0.3594 | 46900 | 0.1014 | - | - | - |
0.3601 | 47000 | 0.1724 | - | - | - |
0.3609 | 47100 | 0.1838 | - | - | - |
0.3617 | 47200 | 0.1259 | - | - | - |
0.3624 | 47300 | 0.1161 | - | - | - |
0.3632 | 47400 | 0.1746 | - | - | - |
0.3640 | 47500 | 0.1764 | - | - | - |
0.3647 | 47600 | 0.1176 | - | - | - |
0.3655 | 47700 | 0.1461 | - | - | - |
0.3663 | 47800 | 0.0837 | - | - | - |
0.3670 | 47900 | 0.0984 | - | - | - |
0.3678 | 48000 | 0.0783 | 0.1882 | 0.9822 | - |
0.3686 | 48100 | 0.1031 | - | - | - |
0.3693 | 48200 | 0.1257 | - | - | - |
0.3701 | 48300 | 0.1874 | - | - | - |
0.3709 | 48400 | 0.1645 | - | - | - |
0.3716 | 48500 | 0.1352 | - | - | - |
0.3724 | 48600 | 0.1158 | - | - | - |
0.3732 | 48700 | 0.1724 | - | - | - |
0.3739 | 48800 | 0.0974 | - | - | - |
0.3747 | 48900 | 0.0827 | - | - | - |
0.3755 | 49000 | 0.2194 | - | - | - |
0.3762 | 49100 | 0.1576 | - | - | - |
0.3770 | 49200 | 0.1702 | - | - | - |
0.3778 | 49300 | 0.151 | - | - | - |
0.3785 | 49400 | 0.1416 | - | - | - |
0.3793 | 49500 | 0.1263 | - | - | - |
0.3801 | 49600 | 0.1186 | - | - | - |
0.3808 | 49700 | 0.241 | - | - | - |
0.3816 | 49800 | 0.1737 | - | - | - |
0.3824 | 49900 | 0.122 | - | - | - |
0.3831 | 50000 | 0.2243 | 0.0826 | 0.986 | - |
0.3839 | 50100 | 0.077 | - | - | - |
0.3847 | 50200 | 0.1728 | - | - | - |
0.3854 | 50300 | 0.0942 | - | - | - |
0.3862 | 50400 | 0.1689 | - | - | - |
0.3870 | 50500 | 0.2525 | - | - | - |
0.3877 | 50600 | 0.2081 | - | - | - |
0.3885 | 50700 | 0.0778 | - | - | - |
0.3893 | 50800 | 0.0302 | - | - | - |
0.3900 | 50900 | 0.0821 | - | - | - |
0.3908 | 51000 | 0.0442 | - | - | - |
0.3916 | 51100 | 0.0426 | - | - | - |
0.3923 | 51200 | 0.0611 | - | - | - |
0.3931 | 51300 | 0.0078 | - | - | - |
0.3939 | 51400 | 0.0823 | - | - | - |
0.3946 | 51500 | 0.1089 | - | - | - |
0.3954 | 51600 | 0.0427 | - | - | - |
0.3962 | 51700 | 0.0808 | - | - | - |
0.3969 | 51800 | 0.1833 | - | - | - |
0.3977 | 51900 | 0.1553 | - | - | - |
0.3985 | 52000 | 0.1002 | 0.2288 | 0.9742 | - |
0.3992 | 52100 | 0.0833 | - | - | - |
0.4000 | 52200 | 0.1126 | - | - | - |
0.4008 | 52300 | 0.1907 | - | - | - |
0.4015 | 52400 | 0.1434 | - | - | - |
0.4023 | 52500 | 0.0357 | - | - | - |
0.4031 | 52600 | 0.1061 | - | - | - |
0.4038 | 52700 | 0.0674 | - | - | - |
0.4046 | 52800 | 0.056 | - | - | - |
0.4054 | 52900 | 0.0328 | - | - | - |
0.4061 | 53000 | 0.0457 | - | - | - |
0.4069 | 53100 | 0.0608 | - | - | - |
0.4077 | 53200 | 0.0311 | - | - | - |
0.4084 | 53300 | 0.0597 | - | - | - |
0.4092 | 53400 | 0.0809 | - | - | - |
0.4100 | 53500 | 0.0371 | - | - | - |
0.4107 | 53600 | 0.1224 | - | - | - |
0.4115 | 53700 | 0.3256 | - | - | - |
0.4123 | 53800 | 0.1202 | - | - | - |
0.4130 | 53900 | 0.1193 | - | - | - |
0.4138 | 54000 | 0.0382 | 0.1721 | 0.9852 | - |
0.4146 | 54100 | 0.0395 | - | - | - |
0.4153 | 54200 | 0.1023 | - | - | - |
0.4161 | 54300 | 0.0929 | - | - | - |
0.4169 | 54400 | 0.0419 | - | - | - |
0.4176 | 54500 | 0.0178 | - | - | - |
0.4184 | 54600 | 0.0398 | - | - | - |
0.4192 | 54700 | 0.0949 | - | - | - |
0.4199 | 54800 | 0.1276 | - | - | - |
0.4207 | 54900 | 0.0598 | - | - | - |
0.4214 | 55000 | 0.1563 | - | - | - |
0.4222 | 55100 | 0.0404 | - | - | - |
0.4230 | 55200 | 0.0684 | - | - | - |
0.4237 | 55300 | 0.0203 | - | - | - |
0.4245 | 55400 | 0.0499 | - | - | - |
0.4253 | 55500 | 0.0574 | - | - | - |
0.4260 | 55600 | 0.0175 | - | - | - |
0.4268 | 55700 | 0.1218 | - | - | - |
0.4276 | 55800 | 0.0674 | - | - | - |
0.4283 | 55900 | 0.0784 | - | - | - |
0.4291 | 56000 | 0.0509 | 2.1277 | 0.861 | - |
0.4299 | 56100 | 0.0557 | - | - | - |
0.4306 | 56200 | 0.0875 | - | - | - |
0.4314 | 56300 | 0.089 | - | - | - |
0.4322 | 56400 | 0.081 | - | - | - |
0.4329 | 56500 | 0.051 | - | - | - |
0.4337 | 56600 | 0.1965 | - | - | - |
0.4345 | 56700 | 0.1703 | - | - | - |
0.4352 | 56800 | 1.0721 | - | - | - |
0.4360 | 56900 | 0.9794 | - | - | - |
0.4368 | 57000 | 1.0224 | - | - | - |
0.4375 | 57100 | 1.0802 | - | - | - |
0.4383 | 57200 | 1.0966 | - | - | - |
0.4391 | 57300 | 1.0528 | - | - | - |
0.4398 | 57400 | 1.0647 | - | - | - |
0.4406 | 57500 | 0.9738 | - | - | - |
0.4414 | 57600 | 1.0226 | - | - | - |
0.4421 | 57700 | 1.0012 | - | - | - |
0.4429 | 57800 | 1.0331 | - | - | - |
0.4437 | 57900 | 0.9854 | - | - | - |
0.4444 | 58000 | 1.0047 | 0.0744 | 0.9918 | - |
0.4452 | 58100 | 0.9554 | - | - | - |
0.4460 | 58200 | 0.9855 | - | - | - |
0.4467 | 58300 | 0.9454 | - | - | - |
0.4475 | 58400 | 0.9855 | - | - | - |
0.4483 | 58500 | 0.9331 | - | - | - |
0.4490 | 58600 | 0.933 | - | - | - |
0.4498 | 58700 | 0.9431 | - | - | - |
0.4506 | 58800 | 0.9434 | - | - | - |
0.4513 | 58900 | 0.9583 | - | - | - |
0.4521 | 59000 | 0.9204 | - | - | - |
0.4529 | 59100 | 0.9574 | - | - | - |
0.4536 | 59200 | 0.9714 | - | - | - |
0.4544 | 59300 | 0.9263 | - | - | - |
0.4552 | 59400 | 0.8828 | - | - | - |
0.4559 | 59500 | 0.9343 | - | - | - |
0.4567 | 59600 | 0.8743 | - | - | - |
0.4575 | 59700 | 0.9266 | - | - | - |
0.4582 | 59800 | 0.9097 | - | - | - |
0.4590 | 59900 | 0.9303 | - | - | - |
0.4598 | 60000 | 0.9452 | 0.1016 | 0.988 | - |
0.4605 | 60100 | 0.9241 | - | - | - |
0.4613 | 60200 | 0.8899 | - | - | - |
0.4621 | 60300 | 0.9122 | - | - | - |
0.4628 | 60400 | 0.8831 | - | - | - |
0.4636 | 60500 | 0.8785 | - | - | - |
0.4644 | 60600 | 0.9588 | - | - | - |
0.4651 | 60700 | 0.9349 | - | - | - |
0.4659 | 60800 | 1.0211 | - | - | - |
0.4667 | 60900 | 1.0755 | - | - | - |
0.4674 | 61000 | 1.0176 | - | - | - |
0.4682 | 61100 | 1.0608 | - | - | - |
0.4690 | 61200 | 1.0493 | - | - | - |
0.4697 | 61300 | 1.0761 | - | - | - |
0.4705 | 61400 | 1.0182 | - | - | - |
0.4713 | 61500 | 1.0641 | - | - | - |
0.4720 | 61600 | 1.0029 | - | - | - |
0.4728 | 61700 | 1.0532 | - | - | - |
0.4736 | 61800 | 0.9898 | - | - | - |
0.4743 | 61900 | 1.0159 | - | - | - |
0.4751 | 62000 | 1.0474 | 0.1205 | 0.9816 | - |
0.4759 | 62100 | 1.0041 | - | - | - |
0.4766 | 62200 | 1.0088 | - | - | - |
0.4774 | 62300 | 0.9934 | - | - | - |
0.4782 | 62400 | 0.9959 | - | - | - |
0.4789 | 62500 | 1.0032 | - | - | - |
0.4797 | 62600 | 1.0464 | - | - | - |
0.4805 | 62700 | 0.9998 | - | - | - |
0.4812 | 62800 | 1.0052 | - | - | - |
0.4820 | 62900 | 1.0199 | - | - | - |
0.4828 | 63000 | 1.0047 | - | - | - |
0.4835 | 63100 | 1.0236 | - | - | - |
0.4843 | 63200 | 1.0041 | - | - | - |
0.4851 | 63300 | 1.0608 | - | - | - |
0.4858 | 63400 | 1.0167 | - | - | - |
0.4866 | 63500 | 0.9983 | - | - | - |
0.4873 | 63600 | 1.0408 | - | - | - |
0.4881 | 63700 | 1.0163 | - | - | - |
0.4889 | 63800 | 0.9447 | - | - | - |
0.4896 | 63900 | 1.0192 | - | - | - |
0.4904 | 64000 | 1.0316 | 0.1436 | 0.9766 | - |
0.4912 | 64100 | 1.0069 | - | - | - |
0.4919 | 64200 | 0.9909 | - | - | - |
0.4927 | 64300 | 1.0286 | - | - | - |
0.4935 | 64400 | 1.0212 | - | - | - |
0.4942 | 64500 | 1.0155 | - | - | - |
0.4950 | 64600 | 0.9988 | - | - | - |
0.4958 | 64700 | 0.9923 | - | - | - |
0.4965 | 64800 | 0.9713 | - | - | - |
0.4973 | 64900 | 1.0062 | - | - | - |
0.4981 | 65000 | 1.013 | - | - | - |
0.4988 | 65100 | 1.0055 | - | - | - |
0.4996 | 65200 | 0.9807 | - | - | - |
0.5004 | 65300 | 0.9428 | - | - | - |
0.5011 | 65400 | 0.9476 | - | - | - |
0.5019 | 65500 | 0.9222 | - | - | - |
0.5027 | 65600 | 0.9663 | - | - | - |
0.5034 | 65700 | 0.9706 | - | - | - |
0.5042 | 65800 | 0.9639 | - | - | - |
0.5050 | 65900 | 0.963 | - | - | - |
0.5057 | 66000 | 0.9782 | 0.1326 | 0.9764 | - |
0.5065 | 66100 | 0.9537 | - | - | - |
0.5073 | 66200 | 1.0072 | - | - | - |
0.5080 | 66300 | 0.9767 | - | - | - |
0.5088 | 66400 | 0.9792 | - | - | - |
0.5096 | 66500 | 0.9615 | - | - | - |
0.5103 | 66600 | 0.983 | - | - | - |
0.5111 | 66700 | 0.9542 | - | - | - |
0.5119 | 66800 | 0.9687 | - | - | - |
0.5126 | 66900 | 0.9659 | - | - | - |
0.5134 | 67000 | 0.973 | - | - | - |
0.5142 | 67100 | 0.9895 | - | - | - |
0.5149 | 67200 | 0.9716 | - | - | - |
0.5157 | 67300 | 0.9161 | - | - | - |
0.5165 | 67400 | 0.9851 | - | - | - |
0.5172 | 67500 | 1.0032 | - | - | - |
0.5180 | 67600 | 0.9414 | - | - | - |
0.5188 | 67700 | 0.9801 | - | - | - |
0.5195 | 67800 | 0.9798 | - | - | - |
0.5203 | 67900 | 0.9632 | - | - | - |
0.5211 | 68000 | 0.9715 | 0.1746 | 0.9674 | - |
0.5218 | 68100 | 0.9983 | - | - | - |
0.5226 | 68200 | 0.9747 | - | - | - |
0.5234 | 68300 | 0.9685 | - | - | - |
0.5241 | 68400 | 1.011 | - | - | - |
0.5249 | 68500 | 0.9542 | - | - | - |
0.5257 | 68600 | 0.9662 | - | - | - |
0.5264 | 68700 | 0.9838 | - | - | - |
0.5272 | 68800 | 0.9755 | - | - | - |
0.5280 | 68900 | 0.9346 | - | - | - |
0.5287 | 69000 | 0.9348 | - | - | - |
0.5295 | 69100 | 0.9252 | - | - | - |
0.5303 | 69200 | 0.9931 | - | - | - |
0.5310 | 69300 | 0.9877 | - | - | - |
0.5318 | 69400 | 0.9594 | - | - | - |
0.5326 | 69500 | 0.9569 | - | - | - |
0.5333 | 69600 | 0.9564 | - | - | - |
0.5341 | 69700 | 0.9692 | - | - | - |
0.5349 | 69800 | 0.9106 | - | - | - |
0.5356 | 69900 | 0.8954 | - | - | - |
0.5364 | 70000 | 1.0045 | 0.1596 | 0.9648 | - |
0.5372 | 70100 | 0.933 | - | - | - |
0.5379 | 70200 | 0.9637 | - | - | - |
0.5387 | 70300 | 0.924 | - | - | - |
0.5395 | 70400 | 0.9435 | - | - | - |
0.5402 | 70500 | 0.9692 | - | - | - |
0.5410 | 70600 | 0.9407 | - | - | - |
0.5418 | 70700 | 0.9437 | - | - | - |
0.5425 | 70800 | 0.9417 | - | - | - |
0.5433 | 70900 | 0.9367 | - | - | - |
0.5441 | 71000 | 0.9473 | - | - | - |
0.5448 | 71100 | 0.9482 | - | - | - |
0.5456 | 71200 | 0.9312 | - | - | - |
0.5464 | 71300 | 0.976 | - | - | - |
0.5471 | 71400 | 0.9542 | - | - | - |
0.5479 | 71500 | 0.9748 | - | - | - |
0.5487 | 71600 | 0.9263 | - | - | - |
0.5494 | 71700 | 0.9636 | - | - | - |
0.5502 | 71800 | 0.9603 | - | - | - |
0.5509 | 71900 | 0.9328 | - | - | - |
0.5517 | 72000 | 0.9588 | 0.1310 | 0.9752 | - |
0.5525 | 72100 | 0.9288 | - | - | - |
0.5532 | 72200 | 0.972 | - | - | - |
0.5540 | 72300 | 0.9276 | - | - | - |
0.5548 | 72400 | 0.9752 | - | - | - |
0.5555 | 72500 | 0.9437 | - | - | - |
0.5563 | 72600 | 0.9527 | - | - | - |
0.5571 | 72700 | 0.9365 | - | - | - |
0.5578 | 72800 | 0.9536 | - | - | - |
0.5586 | 72900 | 0.9111 | - | - | - |
0.5594 | 73000 | 0.9425 | - | - | - |
0.5601 | 73100 | 0.9342 | - | - | - |
0.5609 | 73200 | 0.9278 | - | - | - |
0.5617 | 73300 | 0.9335 | - | - | - |
0.5624 | 73400 | 0.9231 | - | - | - |
0.5632 | 73500 | 0.87 | - | - | - |
0.5640 | 73600 | 0.8865 | - | - | - |
0.5647 | 73700 | 0.927 | - | - | - |
0.5655 | 73800 | 0.8853 | - | - | - |
0.5663 | 73900 | 0.9467 | - | - | - |
0.5670 | 74000 | 0.9527 | 0.1750 | 0.965 | - |
0.5678 | 74100 | 0.9256 | - | - | - |
0.5686 | 74200 | 0.9032 | - | - | - |
0.5693 | 74300 | 0.9575 | - | - | - |
0.5701 | 74400 | 0.9578 | - | - | - |
0.5709 | 74500 | 0.8954 | - | - | - |
0.5716 | 74600 | 0.9007 | - | - | - |
0.5724 | 74700 | 0.9442 | - | - | - |
0.5732 | 74800 | 0.9296 | - | - | - |
0.5739 | 74900 | 0.8952 | - | - | - |
0.5747 | 75000 | 0.9135 | - | - | - |
0.5755 | 75100 | 0.9241 | - | - | - |
0.5762 | 75200 | 0.93 | - | - | - |
0.5770 | 75300 | 0.9438 | - | - | - |
0.5778 | 75400 | 0.9254 | - | - | - |
0.5785 | 75500 | 0.9127 | - | - | - |
0.5793 | 75600 | 0.9173 | - | - | - |
0.5801 | 75700 | 0.9779 | - | - | - |
0.5808 | 75800 | 0.9122 | - | - | - |
0.5816 | 75900 | 0.9286 | - | - | - |
0.5824 | 76000 | 0.9472 | 0.1710 | 0.9656 | - |
0.5831 | 76100 | 0.8931 | - | - | - |
0.5839 | 76200 | 0.9503 | - | - | - |
0.5847 | 76300 | 0.9395 | - | - | - |
0.5854 | 76400 | 0.9381 | - | - | - |
0.5862 | 76500 | 0.9208 | - | - | - |
0.5870 | 76600 | 0.9093 | - | - | - |
0.5877 | 76700 | 0.9175 | - | - | - |
0.5885 | 76800 | 0.9083 | - | - | - |
0.5893 | 76900 | 0.9291 | - | - | - |
0.5900 | 77000 | 0.954 | - | - | - |
0.5908 | 77100 | 0.8821 | - | - | - |
0.5916 | 77200 | 0.9228 | - | - | - |
0.5923 | 77300 | 0.938 | - | - | - |
0.5931 | 77400 | 0.975 | - | - | - |
0.5939 | 77500 | 0.8982 | - | - | - |
0.5946 | 77600 | 0.873 | - | - | - |
0.5954 | 77700 | 0.9226 | - | - | - |
0.5962 | 77800 | 0.9702 | - | - | - |
0.5969 | 77900 | 0.9134 | - | - | - |
0.5977 | 78000 | 0.9628 | 0.1979 | 0.9582 | - |
0.5985 | 78100 | 0.941 | - | - | - |
0.5992 | 78200 | 0.8893 | - | - | - |
0.6000 | 78300 | 0.9149 | - | - | - |
0.6008 | 78400 | 0.8923 | - | - | - |
0.6015 | 78500 | 0.9461 | - | - | - |
0.6023 | 78600 | 0.9059 | - | - | - |
0.6031 | 78700 | 0.8814 | - | - | - |
0.6038 | 78800 | 0.9173 | - | - | - |
0.6046 | 78900 | 0.9058 | - | - | - |
0.6054 | 79000 | 0.9053 | - | - | - |
0.6061 | 79100 | 0.9056 | - | - | - |
0.6069 | 79200 | 0.9078 | - | - | - |
0.6077 | 79300 | 0.9398 | - | - | - |
0.6084 | 79400 | 0.9458 | - | - | - |
0.6092 | 79500 | 0.9185 | - | - | - |
0.6100 | 79600 | 0.9493 | - | - | - |
0.6107 | 79700 | 0.9118 | - | - | - |
0.6115 | 79800 | 0.9426 | - | - | - |
0.6123 | 79900 | 0.8789 | - | - | - |
0.6130 | 80000 | 0.9457 | 0.1657 | 0.9666 | - |
0.6138 | 80100 | 0.9108 | - | - | - |
0.6145 | 80200 | 0.922 | - | - | - |
0.6153 | 80300 | 0.9139 | - | - | - |
0.6161 | 80400 | 0.8739 | - | - | - |
0.6168 | 80500 | 0.8914 | - | - | - |
0.6176 | 80600 | 0.9097 | - | - | - |
0.6184 | 80700 | 0.924 | - | - | - |
0.6191 | 80800 | 0.9178 | - | - | - |
0.6199 | 80900 | 0.885 | - | - | - |
0.6207 | 81000 | 0.9363 | - | - | - |
0.6214 | 81100 | 0.8954 | - | - | - |
0.6222 | 81200 | 0.8906 | - | - | - |
0.6230 | 81300 | 0.925 | - | - | - |
0.6237 | 81400 | 0.9083 | - | - | - |
0.6245 | 81500 | 0.9257 | - | - | - |
0.6253 | 81600 | 0.9054 | - | - | - |
0.6260 | 81700 | 0.8708 | - | - | - |
0.6268 | 81800 | 0.9376 | - | - | - |
0.6276 | 81900 | 0.8871 | - | - | - |
0.6283 | 82000 | 0.933 | 0.1743 | 0.9618 | - |
0.6291 | 82100 | 0.8358 | - | - | - |
0.6299 | 82200 | 0.8587 | - | - | - |
0.6306 | 82300 | 0.8752 | - | - | - |
0.6314 | 82400 | 0.8764 | - | - | - |
0.6322 | 82500 | 0.8677 | - | - | - |
0.6329 | 82600 | 0.894 | - | - | - |
0.6337 | 82700 | 0.8629 | - | - | - |
0.6345 | 82800 | 0.8981 | - | - | - |
0.6352 | 82900 | 0.8667 | - | - | - |
0.6360 | 83000 | 0.8082 | - | - | - |
0.6368 | 83100 | 0.843 | - | - | - |
0.6375 | 83200 | 0.9289 | - | - | - |
0.6383 | 83300 | 0.8797 | - | - | - |
0.6391 | 83400 | 0.844 | - | - | - |
0.6398 | 83500 | 0.8413 | - | - | - |
0.6406 | 83600 | 0.8655 | - | - | - |
0.6414 | 83700 | 0.8996 | - | - | - |
0.6421 | 83800 | 0.8182 | - | - | - |
0.6429 | 83900 | 0.8272 | - | - | - |
0.6437 | 84000 | 0.8596 | 0.1347 | 0.976 | - |
0.6444 | 84100 | 0.8392 | - | - | - |
0.6452 | 84200 | 0.8569 | - | - | - |
0.6460 | 84300 | 0.8441 | - | - | - |
0.6467 | 84400 | 0.8873 | - | - | - |
0.6475 | 84500 | 0.8965 | - | - | - |
0.6483 | 84600 | 0.8632 | - | - | - |
0.6490 | 84700 | 0.8183 | - | - | - |
0.6498 | 84800 | 0.8385 | - | - | - |
0.6506 | 84900 | 0.8123 | - | - | - |
0.6513 | 85000 | 0.8792 | - | - | - |
0.6521 | 85100 | 0.8762 | - | - | - |
0.6529 | 85200 | 0.7932 | - | - | - |
0.6536 | 85300 | 0.863 | - | - | - |
0.6544 | 85400 | 0.8714 | - | - | - |
0.6552 | 85500 | 0.7889 | - | - | - |
0.6559 | 85600 | 0.8466 | - | - | - |
0.6567 | 85700 | 0.8376 | - | - | - |
0.6575 | 85800 | 0.7704 | - | - | - |
0.6582 | 85900 | 0.829 | - | - | - |
0.6590 | 86000 | 0.8084 | 0.1359 | 0.9734 | - |
0.6598 | 86100 | 0.8495 | - | - | - |
0.6605 | 86200 | 0.8245 | - | - | - |
0.6613 | 86300 | 0.9183 | - | - | - |
0.6621 | 86400 | 0.8138 | - | - | - |
0.6628 | 86500 | 0.8572 | - | - | - |
0.6636 | 86600 | 0.8141 | - | - | - |
0.6644 | 86700 | 0.8724 | - | - | - |
0.6651 | 86800 | 0.8274 | - | - | - |
0.6659 | 86900 | 0.8455 | - | - | - |
0.6667 | 87000 | 0.8331 | - | - | - |
0.6674 | 87100 | 0.8653 | - | - | - |
0.6682 | 87200 | 0.7822 | - | - | - |
0.6690 | 87300 | 0.8233 | - | - | - |
0.6697 | 87400 | 0.811 | - | - | - |
0.6705 | 87500 | 0.813 | - | - | - |
0.6713 | 87600 | 0.8329 | - | - | - |
0.6720 | 87700 | 0.8006 | - | - | - |
0.6728 | 87800 | 0.8273 | - | - | - |
0.6736 | 87900 | 0.8308 | - | - | - |
0.6743 | 88000 | 0.8365 | 0.1680 | 0.9652 | - |
0.6751 | 88100 | 0.8167 | - | - | - |
0.6759 | 88200 | 0.8097 | - | - | - |
0.6766 | 88300 | 0.8065 | - | - | - |
0.6774 | 88400 | 0.858 | - | - | - |
0.6782 | 88500 | 0.832 | - | - | - |
0.6789 | 88600 | 0.8155 | - | - | - |
0.6797 | 88700 | 0.8127 | - | - | - |
0.6804 | 88800 | 0.7509 | - | - | - |
0.6812 | 88900 | 0.8078 | - | - | - |
0.6820 | 89000 | 0.874 | - | - | - |
0.6827 | 89100 | 0.8026 | - | - | - |
0.6835 | 89200 | 0.7962 | - | - | - |
0.6843 | 89300 | 0.8145 | - | - | - |
0.6850 | 89400 | 0.8691 | - | - | - |
0.6858 | 89500 | 0.8038 | - | - | - |
0.6866 | 89600 | 0.8424 | - | - | - |
0.6873 | 89700 | 0.8351 | - | - | - |
0.6881 | 89800 | 0.7891 | - | - | - |
0.6889 | 89900 | 0.8335 | - | - | - |
0.6896 | 90000 | 0.8108 | 0.1562 | 0.9648 | - |
0.6904 | 90100 | 0.8334 | - | - | - |
0.6912 | 90200 | 0.8095 | - | - | - |
0.6919 | 90300 | 0.8269 | - | - | - |
0.6927 | 90400 | 0.7553 | - | - | - |
0.6935 | 90500 | 0.7848 | - | - | - |
0.6942 | 90600 | 0.7454 | - | - | - |
0.6950 | 90700 | 0.7806 | - | - | - |
0.6958 | 90800 | 0.8073 | - | - | - |
0.6965 | 90900 | 0.8025 | - | - | - |
0.6973 | 91000 | 0.792 | - | - | - |
0.6981 | 91100 | 0.8019 | - | - | - |
0.6988 | 91200 | 0.7974 | - | - | - |
0.6996 | 91300 | 0.7981 | - | - | - |
0.7004 | 91400 | 0.7415 | - | - | - |
0.7011 | 91500 | 0.7934 | - | - | - |
0.7019 | 91600 | 0.7888 | - | - | - |
0.7027 | 91700 | 0.8012 | - | - | - |
0.7034 | 91800 | 0.8016 | - | - | - |
0.7042 | 91900 | 0.8099 | - | - | - |
0.7050 | 92000 | 0.8047 | 0.1948 | 0.9554 | - |
0.7057 | 92100 | 0.7944 | - | - | - |
0.7065 | 92200 | 0.834 | - | - | - |
0.7073 | 92300 | 0.797 | - | - | - |
0.7080 | 92400 | 0.789 | - | - | - |
0.7088 | 92500 | 0.7801 | - | - | - |
0.7096 | 92600 | 0.7613 | - | - | - |
0.7103 | 92700 | 0.7977 | - | - | - |
0.7111 | 92800 | 0.788 | - | - | - |
0.7119 | 92900 | 0.7751 | - | - | - |
0.7126 | 93000 | 0.7972 | - | - | - |
0.7134 | 93100 | 0.8149 | - | - | - |
0.7142 | 93200 | 0.7724 | - | - | - |
0.7149 | 93300 | 0.7962 | - | - | - |
0.7157 | 93400 | 0.8016 | - | - | - |
0.7165 | 93500 | 0.8238 | - | - | - |
0.7172 | 93600 | 0.8118 | - | - | - |
0.7180 | 93700 | 0.7519 | - | - | - |
0.7188 | 93800 | 0.7949 | - | - | - |
0.7195 | 93900 | 0.8123 | - | - | - |
0.7203 | 94000 | 0.8212 | 0.1774 | 0.9622 | - |
0.7211 | 94100 | 0.7563 | - | - | - |
0.7218 | 94200 | 0.8104 | - | - | - |
0.7226 | 94300 | 0.7946 | - | - | - |
0.7234 | 94400 | 0.7583 | - | - | - |
0.7241 | 94500 | 0.8039 | - | - | - |
0.7249 | 94600 | 0.7892 | - | - | - |
0.7257 | 94700 | 0.8001 | - | - | - |
0.7264 | 94800 | 0.7612 | - | - | - |
0.7272 | 94900 | 0.7363 | - | - | - |
0.7280 | 95000 | 0.8314 | - | - | - |
0.7287 | 95100 | 0.7611 | - | - | - |
0.7295 | 95200 | 0.78 | - | - | - |
0.7303 | 95300 | 0.7524 | - | - | - |
0.7310 | 95400 | 0.7708 | - | - | - |
0.7318 | 95500 | 0.8096 | - | - | - |
0.7326 | 95600 | 0.7839 | - | - | - |
0.7333 | 95700 | 0.7585 | - | - | - |
0.7341 | 95800 | 0.7316 | - | - | - |
0.7349 | 95900 | 0.7924 | - | - | - |
0.7356 | 96000 | 0.7869 | 0.1820 | 0.9574 | - |
0.7364 | 96100 | 0.7748 | - | - | - |
0.7372 | 96200 | 0.7863 | - | - | - |
0.7379 | 96300 | 0.7749 | - | - | - |
0.7387 | 96400 | 0.7627 | - | - | - |
0.7395 | 96500 | 0.7809 | - | - | - |
0.7402 | 96600 | 0.7733 | - | - | - |
0.7410 | 96700 | 0.7898 | - | - | - |
0.7418 | 96800 | 0.7804 | - | - | - |
0.7425 | 96900 | 0.7812 | - | - | - |
0.7433 | 97000 | 0.8134 | - | - | - |
0.7440 | 97100 | 0.7542 | - | - | - |
0.7448 | 97200 | 0.8209 | - | - | - |
0.7456 | 97300 | 0.7689 | - | - | - |
0.7463 | 97400 | 0.8095 | - | - | - |
0.7471 | 97500 | 0.7806 | - | - | - |
0.7479 | 97600 | 0.7757 | - | - | - |
0.7486 | 97700 | 0.7941 | - | - | - |
0.7494 | 97800 | 0.8171 | - | - | - |
0.7502 | 97900 | 0.7946 | - | - | - |
0.7509 | 98000 | 0.7825 | 0.1815 | 0.9586 | - |
0.7517 | 98100 | 0.7709 | - | - | - |
0.7525 | 98200 | 0.7646 | - | - | - |
0.7532 | 98300 | 0.765 | - | - | - |
0.7540 | 98400 | 0.7812 | - | - | - |
0.7548 | 98500 | 0.7277 | - | - | - |
0.7555 | 98600 | 0.7471 | - | - | - |
0.7563 | 98700 | 0.8027 | - | - | - |
0.7571 | 98800 | 0.7509 | - | - | - |
0.7578 | 98900 | 0.7898 | - | - | - |
0.7586 | 99000 | 0.8319 | - | - | - |
0.7594 | 99100 | 0.7737 | - | - | - |
0.7601 | 99200 | 0.7546 | - | - | - |
0.7609 | 99300 | 0.7669 | - | - | - |
0.7617 | 99400 | 0.7928 | - | - | - |
0.7624 | 99500 | 0.735 | - | - | - |
0.7632 | 99600 | 0.7852 | - | - | - |
0.7640 | 99700 | 0.7827 | - | - | - |
0.7647 | 99800 | 0.7933 | - | - | - |
0.7655 | 99900 | 0.7767 | - | - | - |
0.7663 | 100000 | 0.7515 | 0.1655 | 0.9632 | - |
0.7670 | 100100 | 0.7787 | - | - | - |
0.7678 | 100200 | 0.7528 | - | - | - |
0.7686 | 100300 | 0.7858 | - | - | - |
0.7693 | 100400 | 0.7492 | - | - | - |
0.7701 | 100500 | 0.7622 | - | - | - |
0.7709 | 100600 | 0.7647 | - | - | - |
0.7716 | 100700 | 0.7822 | - | - | - |
0.7724 | 100800 | 0.7673 | - | - | - |
0.7732 | 100900 | 0.774 | - | - | - |
0.7739 | 101000 | 0.7627 | - | - | - |
0.7747 | 101100 | 0.7456 | - | - | - |
0.7755 | 101200 | 0.8082 | - | - | - |
0.7762 | 101300 | 0.773 | - | - | - |
0.7770 | 101400 | 0.779 | - | - | - |
0.7778 | 101500 | 0.7946 | - | - | - |
0.7785 | 101600 | 0.7823 | - | - | - |
0.7793 | 101700 | 0.7499 | - | - | - |
0.7801 | 101800 | 0.8175 | - | - | - |
0.7808 | 101900 | 0.8097 | - | - | - |
0.7816 | 102000 | 0.7561 | 0.1779 | 0.963 | - |
0.7824 | 102100 | 0.7691 | - | - | - |
0.7831 | 102200 | 0.784 | - | - | - |
0.7839 | 102300 | 0.7468 | - | - | - |
0.7847 | 102400 | 0.8237 | - | - | - |
0.7854 | 102500 | 0.7578 | - | - | - |
0.7862 | 102600 | 0.7622 | - | - | - |
0.7870 | 102700 | 0.844 | - | - | - |
0.7877 | 102800 | 0.8233 | - | - | - |
0.7885 | 102900 | 0.7852 | - | - | - |
0.7893 | 103000 | 0.8253 | - | - | - |
0.7900 | 103100 | 0.7684 | - | - | - |
0.7908 | 103200 | 0.7489 | - | - | - |
0.7916 | 103300 | 0.7767 | - | - | - |
0.7923 | 103400 | 0.7859 | - | - | - |
0.7931 | 103500 | 0.7739 | - | - | - |
0.7939 | 103600 | 0.7303 | - | - | - |
0.7946 | 103700 | 0.7546 | - | - | - |
0.7954 | 103800 | 0.7719 | - | - | - |
0.7962 | 103900 | 0.7511 | - | - | - |
0.7969 | 104000 | 0.7531 | 0.1643 | 0.9646 | - |
0.7977 | 104100 | 0.7297 | - | - | - |
0.7985 | 104200 | 0.7698 | - | - | - |
0.7992 | 104300 | 0.774 | - | - | - |
0.8000 | 104400 | 0.8124 | - | - | - |
0.8008 | 104500 | 0.8012 | - | - | - |
0.8015 | 104600 | 0.8163 | - | - | - |
0.8023 | 104700 | 0.7677 | - | - | - |
0.8031 | 104800 | 0.8017 | - | - | - |
0.8038 | 104900 | 0.7194 | - | - | - |
0.8046 | 105000 | 0.7623 | - | - | - |
0.8054 | 105100 | 0.7559 | - | - | - |
0.8061 | 105200 | 0.7735 | - | - | - |
0.8069 | 105300 | 0.7338 | - | - | - |
0.8077 | 105400 | 0.7104 | - | - | - |
0.8084 | 105500 | 0.7607 | - | - | - |
0.8092 | 105600 | 0.823 | - | - | - |
0.8099 | 105700 | 0.8029 | - | - | - |
0.8107 | 105800 | 0.7811 | - | - | - |
0.8115 | 105900 | 0.7794 | - | - | - |
0.8122 | 106000 | 0.7782 | 0.1784 | 0.961 | - |
0.8130 | 106100 | 0.7459 | - | - | - |
0.8138 | 106200 | 0.744 | - | - | - |
0.8145 | 106300 | 0.7681 | - | - | - |
0.8153 | 106400 | 0.7698 | - | - | - |
0.8161 | 106500 | 0.7359 | - | - | - |
0.8168 | 106600 | 0.781 | - | - | - |
0.8176 | 106700 | 0.7251 | - | - | - |
0.8184 | 106800 | 0.7478 | - | - | - |
0.8191 | 106900 | 0.7782 | - | - | - |
0.8199 | 107000 | 0.7464 | - | - | - |
0.8207 | 107100 | 0.6965 | - | - | - |
0.8214 | 107200 | 0.7368 | - | - | - |
0.8222 | 107300 | 0.7081 | - | - | - |
0.8230 | 107400 | 0.7037 | - | - | - |
0.8237 | 107500 | 0.6953 | - | - | - |
0.8245 | 107600 | 0.7169 | - | - | - |
0.8253 | 107700 | 0.7177 | - | - | - |
0.8260 | 107800 | 0.6925 | - | - | - |
0.8268 | 107900 | 0.6474 | - | - | - |
0.8276 | 108000 | 0.6675 | 0.1514 | 0.9684 | - |
0.8283 | 108100 | 0.6432 | - | - | - |
0.8291 | 108200 | 0.7523 | - | - | - |
0.8299 | 108300 | 0.6864 | - | - | - |
0.8306 | 108400 | 0.7036 | - | - | - |
0.8314 | 108500 | 0.6708 | - | - | - |
0.8322 | 108600 | 0.6739 | - | - | - |
0.8329 | 108700 | 0.6925 | - | - | - |
0.8337 | 108800 | 0.6101 | - | - | - |
0.8345 | 108900 | 0.653 | - | - | - |
0.8352 | 109000 | 0.664 | - | - | - |
0.8360 | 109100 | 0.6638 | - | - | - |
0.8368 | 109200 | 0.6587 | - | - | - |
0.8375 | 109300 | 0.6837 | - | - | - |
0.8383 | 109400 | 0.6813 | - | - | - |
0.8391 | 109500 | 0.6678 | - | - | - |
0.8398 | 109600 | 0.6601 | - | - | - |
0.8406 | 109700 | 0.61 | - | - | - |
0.8414 | 109800 | 0.6185 | - | - | - |
0.8421 | 109900 | 0.5919 | - | - | - |
0.8429 | 110000 | 0.6647 | 0.1559 | 0.9656 | - |
0.8437 | 110100 | 0.6891 | - | - | - |
0.8444 | 110200 | 0.652 | - | - | - |
0.8452 | 110300 | 0.6482 | - | - | - |
0.8460 | 110400 | 0.6493 | - | - | - |
0.8467 | 110500 | 0.5998 | - | - | - |
0.8475 | 110600 | 0.665 | - | - | - |
0.8483 | 110700 | 0.6228 | - | - | - |
0.8490 | 110800 | 0.6149 | - | - | - |
0.8498 | 110900 | 0.6488 | - | - | - |
0.8506 | 111000 | 0.61 | - | - | - |
0.8513 | 111100 | 0.657 | - | - | - |
0.8521 | 111200 | 0.638 | - | - | - |
0.8529 | 111300 | 0.6588 | - | - | - |
0.8536 | 111400 | 0.6086 | - | - | - |
0.8544 | 111500 | 0.6365 | - | - | - |
0.8552 | 111600 | 0.6066 | - | - | - |
0.8559 | 111700 | 0.663 | - | - | - |
0.8567 | 111800 | 0.5876 | - | - | - |
0.8575 | 111900 | 0.6153 | - | - | - |
0.8582 | 112000 | 0.6502 | 0.1445 | 0.9698 | - |
0.8590 | 112100 | 0.6061 | - | - | - |
0.8598 | 112200 | 0.6064 | - | - | - |
0.8605 | 112300 | 0.5988 | - | - | - |
0.8613 | 112400 | 0.5941 | - | - | - |
0.8621 | 112500 | 0.6372 | - | - | - |
0.8628 | 112600 | 0.652 | - | - | - |
0.8636 | 112700 | 0.5988 | - | - | - |
0.8644 | 112800 | 0.5789 | - | - | - |
0.8651 | 112900 | 0.5987 | - | - | - |
0.8659 | 113000 | 0.6204 | - | - | - |
0.8667 | 113100 | 0.6054 | - | - | - |
0.8674 | 113200 | 0.5752 | - | - | - |
0.8682 | 113300 | 0.6005 | - | - | - |
0.8690 | 113400 | 0.6158 | - | - | - |
0.8697 | 113500 | 0.6136 | - | - | - |
0.8705 | 113600 | 0.6227 | - | - | - |
0.8713 | 113700 | 0.6229 | - | - | - |
0.8720 | 113800 | 0.539 | - | - | - |
0.8728 | 113900 | 0.606 | - | - | - |
0.8735 | 114000 | 0.6278 | 0.1484 | 0.9696 | - |
0.8743 | 114100 | 0.595 | - | - | - |
0.8751 | 114200 | 0.5903 | - | - | - |
0.8758 | 114300 | 0.6173 | - | - | - |
0.8766 | 114400 | 0.6345 | - | - | - |
0.8774 | 114500 | 0.6205 | - | - | - |
0.8781 | 114600 | 0.5783 | - | - | - |
0.8789 | 114700 | 0.5859 | - | - | - |
0.8797 | 114800 | 0.588 | - | - | - |
0.8804 | 114900 | 0.601 | - | - | - |
0.8812 | 115000 | 0.5924 | - | - | - |
0.8820 | 115100 | 0.6528 | - | - | - |
0.8827 | 115200 | 0.6359 | - | - | - |
0.8835 | 115300 | 0.5895 | - | - | - |
0.8843 | 115400 | 0.5417 | - | - | - |
0.8850 | 115500 | 0.5728 | - | - | - |
0.8858 | 115600 | 0.5493 | - | - | - |
0.8866 | 115700 | 0.5687 | - | - | - |
0.8873 | 115800 | 0.5954 | - | - | - |
0.8881 | 115900 | 0.5786 | - | - | - |
0.8889 | 116000 | 0.6036 | 0.1424 | 0.9698 | - |
0.8896 | 116100 | 0.5575 | - | - | - |
0.8904 | 116200 | 0.5787 | - | - | - |
0.8912 | 116300 | 0.6071 | - | - | - |
0.8919 | 116400 | 0.5871 | - | - | - |
0.8927 | 116500 | 0.5929 | - | - | - |
0.8935 | 116600 | 0.5926 | - | - | - |
0.8942 | 116700 | 0.6003 | - | - | - |
0.8950 | 116800 | 0.5767 | - | - | - |
0.8958 | 116900 | 0.59 | - | - | - |
0.8965 | 117000 | 0.5877 | - | - | - |
0.8973 | 117100 | 0.5613 | - | - | - |
0.8981 | 117200 | 0.5706 | - | - | - |
0.8988 | 117300 | 0.5777 | - | - | - |
0.8996 | 117400 | 0.5986 | - | - | - |
0.9004 | 117500 | 0.611 | - | - | - |
0.9011 | 117600 | 0.5516 | - | - | - |
0.9019 | 117700 | 0.6038 | - | - | - |
0.9027 | 117800 | 0.5589 | - | - | - |
0.9034 | 117900 | 0.5935 | - | - | - |
0.9042 | 118000 | 0.5896 | 0.1477 | 0.9696 | - |
0.9050 | 118100 | 0.5737 | - | - | - |
0.9057 | 118200 | 0.5562 | - | - | - |
0.9065 | 118300 | 0.5624 | - | - | - |
0.9073 | 118400 | 0.6038 | - | - | - |
0.9080 | 118500 | 0.5552 | - | - | - |
0.9088 | 118600 | 0.5646 | - | - | - |
0.9096 | 118700 | 0.5629 | - | - | - |
0.9103 | 118800 | 0.5674 | - | - | - |
0.9111 | 118900 | 0.5716 | - | - | - |
0.9119 | 119000 | 0.5767 | - | - | - |
0.9126 | 119100 | 0.5352 | - | - | - |
0.9134 | 119200 | 0.59 | - | - | - |
0.9142 | 119300 | 0.5584 | - | - | - |
0.9149 | 119400 | 0.5769 | - | - | - |
0.9157 | 119500 | 0.5906 | - | - | - |
0.9165 | 119600 | 0.5807 | - | - | - |
0.9172 | 119700 | 0.5469 | - | - | - |
0.9180 | 119800 | 0.9169 | - | - | - |
0.9188 | 119900 | 0.9665 | - | - | - |
0.9195 | 120000 | 1.0132 | 0.1389 | 0.9704 | - |
0.9203 | 120100 | 0.9572 | - | - | - |
0.9211 | 120200 | 0.9023 | - | - | - |
0.9218 | 120300 | 0.6828 | - | - | - |
0.9226 | 120400 | 0.7277 | - | - | - |
0.9234 | 120500 | 0.7439 | - | - | - |
0.9241 | 120600 | 0.7554 | - | - | - |
0.9249 | 120700 | 0.7376 | - | - | - |
0.9257 | 120800 | 0.6783 | - | - | - |
0.9264 | 120900 | 0.7071 | - | - | - |
0.9272 | 121000 | 0.7251 | - | - | - |
0.9280 | 121100 | 0.7385 | - | - | - |
0.9287 | 121200 | 0.7207 | - | - | - |
0.9295 | 121300 | 0.7903 | - | - | - |
0.9303 | 121400 | 0.7863 | - | - | - |
0.9310 | 121500 | 0.7672 | - | - | - |
0.9318 | 121600 | 0.6873 | - | - | - |
0.9326 | 121700 | 0.7526 | - | - | - |
0.9333 | 121800 | 0.764 | - | - | - |
0.9341 | 121900 | 0.7827 | - | - | - |
0.9349 | 122000 | 0.8713 | 0.1329 | 0.9706 | - |
0.9356 | 122100 | 0.7872 | - | - | - |
0.9364 | 122200 | 0.6837 | - | - | - |
0.9372 | 122300 | 0.6017 | - | - | - |
0.9379 | 122400 | 0.6283 | - | - | - |
0.9387 | 122500 | 0.758 | - | - | - |
0.9394 | 122600 | 0.694 | - | - | - |
0.9402 | 122700 | 0.7112 | - | - | - |
0.9410 | 122800 | 0.7566 | - | - | - |
0.9417 | 122900 | 0.7118 | - | - | - |
0.9425 | 123000 | 0.6594 | - | - | - |
0.9433 | 123100 | 0.678 | - | - | - |
0.9440 | 123200 | 0.6626 | - | - | - |
0.9448 | 123300 | 0.6724 | - | - | - |
0.9456 | 123400 | 0.7042 | - | - | - |
0.9463 | 123500 | 0.6526 | - | - | - |
0.9471 | 123600 | 0.7039 | - | - | - |
0.9479 | 123700 | 0.6459 | - | - | - |
0.9486 | 123800 | 0.5759 | - | - | - |
0.9494 | 123900 | 0.6211 | - | - | - |
0.9502 | 124000 | 0.6905 | 0.1363 | 0.9698 | - |
0.9509 | 124100 | 0.6422 | - | - | - |
0.9517 | 124200 | 0.668 | - | - | - |
0.9525 | 124300 | 0.5819 | - | - | - |
0.9532 | 124400 | 0.661 | - | - | - |
0.9540 | 124500 | 0.6243 | - | - | - |
0.9548 | 124600 | 0.5936 | - | - | - |
0.9555 | 124700 | 0.5736 | - | - | - |
0.9563 | 124800 | 0.5955 | - | - | - |
0.9571 | 124900 | 0.5115 | - | - | - |
0.9578 | 125000 | 0.5495 | - | - | - |
0.9586 | 125100 | 0.5858 | - | - | - |
0.9594 | 125200 | 0.5644 | - | - | - |
0.9601 | 125300 | 0.5262 | - | - | - |
0.9609 | 125400 | 0.5588 | - | - | - |
0.9617 | 125500 | 0.7303 | - | - | - |
0.9624 | 125600 | 0.6894 | - | - | - |
0.9632 | 125700 | 0.5998 | - | - | - |
0.9640 | 125800 | 0.6216 | - | - | - |
0.9647 | 125900 | 0.5922 | - | - | - |
0.9655 | 126000 | 0.6121 | 0.1362 | 0.9694 | - |
0.9663 | 126100 | 0.5996 | - | - | - |
0.9670 | 126200 | 0.5894 | - | - | - |
0.9678 | 126300 | 0.5347 | - | - | - |
0.9686 | 126400 | 0.5803 | - | - | - |
0.9693 | 126500 | 0.6104 | - | - | - |
0.9701 | 126600 | 0.7723 | - | - | - |
0.9709 | 126700 | 0.7577 | - | - | - |
0.9716 | 126800 | 0.7315 | - | - | - |
0.9724 | 126900 | 0.7341 | - | - | - |
0.9732 | 127000 | 0.7652 | - | - | - |
0.9739 | 127100 | 0.6364 | - | - | - |
0.9747 | 127200 | 0.7246 | - | - | - |
0.9755 | 127300 | 1.2764 | - | - | - |
0.9762 | 127400 | 1.0487 | - | - | - |
0.9770 | 127500 | 0.9892 | - | - | - |
0.9778 | 127600 | 0.767 | - | - | - |
0.9785 | 127700 | 0.7633 | - | - | - |
0.9793 | 127800 | 0.8695 | - | - | - |
0.9801 | 127900 | 0.648 | - | - | - |
0.9808 | 128000 | 0.6227 | 0.1418 | 0.97 | - |
0.9816 | 128100 | 0.5542 | - | - | - |
0.9824 | 128200 | 0.5535 | - | - | - |
0.9831 | 128300 | 0.7327 | - | - | - |
0.9839 | 128400 | 0.6541 | - | - | - |
0.9847 | 128500 | 0.6697 | - | - | - |
0.9854 | 128600 | 0.5708 | - | - | - |
0.9862 | 128700 | 0.6833 | - | - | - |
0.9870 | 128800 | 0.6328 | - | - | - |
0.9877 | 128900 | 0.7026 | - | - | - |
0.9885 | 129000 | 0.5497 | - | - | - |
0.9893 | 129100 | 0.5846 | - | - | - |
0.9900 | 129200 | 0.5708 | - | - | - |
0.9908 | 129300 | 0.7514 | - | - | - |
0.9916 | 129400 | 0.5386 | - | - | - |
0.9923 | 129500 | 0.7419 | - | - | - |
0.9931 | 129600 | 0.8613 | - | - | - |
0.9939 | 129700 | 0.8322 | - | - | - |
0.9946 | 129800 | 0.7606 | - | - | - |
0.9954 | 129900 | 0.7086 | - | - | - |
0.9962 | 130000 | 0.6828 | 0.1488 | 0.9688 | - |
0.9969 | 130100 | 0.8267 | - | - | - |
0.9977 | 130200 | 0.8491 | - | - | - |
0.9985 | 130300 | 1.0619 | - | - | - |
0.9992 | 130400 | 0.8276 | - | - | - |
1.0000 | 130500 | 0.8043 | - | - | - |
1.0 | 130502 | - | - | - | 0.9726 |
Framework Versions
- Python: 3.11.8
- Sentence Transformers: 3.1.1
- Transformers: 4.44.0
- PyTorch: 2.3.0.post101
- Accelerate: 0.33.0
- Datasets: 3.0.2
- Tokenizers: 0.19.0
Citation
BibTeX
Sentence Transformers
@inproceedings{reimers-2019-sentence-bert,
title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
author = "Reimers, Nils and Gurevych, Iryna",
booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
month = "11",
year = "2019",
publisher = "Association for Computational Linguistics",
url = "https://arxiv.org/abs/1908.10084",
}
MultipleNegativesRankingLoss
@misc{henderson2017efficient,
title={Efficient Natural Language Response Suggestion for Smart Reply},
author={Matthew Henderson and Rami Al-Rfou and Brian Strope and Yun-hsuan Sung and Laszlo Lukacs and Ruiqi Guo and Sanjiv Kumar and Balint Miklos and Ray Kurzweil},
year={2017},
eprint={1705.00652},
archivePrefix={arXiv},
primaryClass={cs.CL}
}