Datasets

Modalities: Text
Formats: json
Size: < 1K
Libraries: Datasets, Dask
Commit: change model name (40697a3)
{
  "config": {
    "model": "LaminiModel",
    "model_args": null,
    "batch_size": 1,
    "batch_sizes": [],
    "device": null,
    "use_cache": null,
    "limit": 100,
    "bootstrap_iters": 100000,
    "gen_kwargs": null,
    "model_dtype": "bfloat16",
    "model_name": "icd-ft-mistral-7b-instruct-hf",
    "model_sha": "main"
  },
  "results": {
    "product_response_subjective_score": {
      "product_response_subjective_score": 3.1875
    },
    "product_id_precision_score": {
      "product_id_precision_score": 0.0
    },
    "earnings_response_subjective_score": {
      "earnings_response_subjective_score": 2.2363636363636363
    },
    "earnings_precision_score": {
      "earnings_precision_score": 0.01818181818181818
    },
    "icd11_response_subjective_score": {
      "icd11_response_subjective_score": 2.2888888888888888
    },
    "icd11_precision_score": {
      "icd11_precision_score": 0.2
    },
    "mmlu_flan_n_shot_generative_global_facts": {
      "exact_match,strict-match": 0.34,
      "exact_match_stderr,strict-match": 0.047609522856952344,
      "exact_match,flexible-extract": 0.34,
      "exact_match_stderr,flexible-extract": 0.047609522856952344,
      "alias": "mmlu_flan_n_shot_generative_global_facts"
    },
    "truthfulqa_gen": {
      "bleu_max,none": 17.795881878483,
      "bleu_max_stderr,none": 1.813182129677248,
      "bleu_acc,none": 0.53,
      "bleu_acc_stderr,none": 0.050161355804659205,
      "bleu_diff,none": -2.125661702465655,
      "bleu_diff_stderr,none": 1.434744252223102,
      "rouge1_max,none": 39.008845311741894,
      "rouge1_max_stderr,none": 2.114340485165445,
      "rouge1_acc,none": 0.52,
      "rouge1_acc_stderr,none": 0.050211673156867795,
      "rouge1_diff,none": -1.6407676542576224,
      "rouge1_diff_stderr,none": 1.797673507015776,
      "rouge2_max,none": 27.163127972432466,
      "rouge2_max_stderr,none": 2.1194097651441424,
      "rouge2_acc,none": 0.49,
      "rouge2_acc_stderr,none": 0.05024183937956913,
      "rouge2_diff,none": -2.254914453263179,
      "rouge2_diff_stderr,none": 1.9148845239446861,
      "rougeL_max,none": 36.01824298531865,
      "rougeL_max_stderr,none": 2.128032657919697,
      "rougeL_acc,none": 0.53,
      "rougeL_acc_stderr,none": 0.05016135580465919,
      "rougeL_diff,none": -1.8971433636477628,
      "rougeL_diff_stderr,none": 1.8261427558039585,
      "alias": "truthfulqa_gen"
    }
  }
}
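
The block above follows the usual lm-evaluation-harness layout: a "config" section describing the run (model name, dtype, batch size, evaluation limit, bootstrap iterations) and a "results" section mapping each task to a dictionary of metric values. Below is a minimal sketch of reading the file with Python's standard json module; the filename results.json is an assumption, not part of this dataset, so adjust it to the actual file in the repository.

import json

# Load the evaluation report (filename is assumed; change to the real file path).
with open("results.json") as f:
    report = json.load(f)

# Run configuration: model identifier, dtype, evaluation limit, etc.
config = report["config"]
print(f"model: {config['model_name']} (dtype={config['model_dtype']})")

# Each task maps to a dict of metric name -> value; print the numeric metrics.
for task, metrics in report["results"].items():
    for metric, value in metrics.items():
        if isinstance(value, (int, float)):
            print(f"{task:45s} {metric:40s} {value:.4f}")

The isinstance check skips non-numeric entries such as the "alias" field, so the loop prints only the scores and their standard errors.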