{
  "dataset_revision": "1399c76144fd37290681b995c656ef9b2e06e26d",
  "task_name": "AmazonReviewsClassification",
  "mteb_version": "1.25.1",
  "scores": {
    "test": [
      {
        "accuracy": 0.32318,
        "f1": 0.319973,
        "f1_weighted": 0.319973,
        "scores_per_experiment": [
          {
            "accuracy": 0.341,
            "f1": 0.333855,
            "f1_weighted": 0.333855
          },
          {
            "accuracy": 0.3368,
            "f1": 0.338041,
            "f1_weighted": 0.338041
          },
          {
            "accuracy": 0.3336,
            "f1": 0.330524,
            "f1_weighted": 0.330524
          },
          {
            "accuracy": 0.3086,
            "f1": 0.309647,
            "f1_weighted": 0.309647
          },
          {
            "accuracy": 0.3114,
            "f1": 0.305097,
            "f1_weighted": 0.305097
          },
          {
            "accuracy": 0.3334,
            "f1": 0.330257,
            "f1_weighted": 0.330257
          },
          {
            "accuracy": 0.299,
            "f1": 0.292306,
            "f1_weighted": 0.292306
          },
          {
            "accuracy": 0.3384,
            "f1": 0.336788,
            "f1_weighted": 0.336788
          },
          {
            "accuracy": 0.3242,
            "f1": 0.31879,
            "f1_weighted": 0.31879
          },
          {
            "accuracy": 0.3054,
            "f1": 0.304421,
            "f1_weighted": 0.304421
          }
        ],
        "main_score": 0.32318,
        "hf_subset": "en",
        "languages": [
          "eng-Latn"
        ]
      }
    ]
  },
  "evaluation_time": 9.712511777877808,
  "kg_co2_emissions": null
}