{
  "config": {
    "model": "LaminiModel",
    "model_args": null,
    "batch_size": 1,
    "batch_sizes": [],
    "device": null,
    "use_cache": null,
    "limit": 100,
    "bootstrap_iters": 100000,
    "gen_kwargs": null,
    "model_dtype": "bfloat16",
    "model_name": "products-ft-llama2-7b-chat-hf",
    "model_sha": "main"
  },
  "results": {
    "product_response_subjective_score": {
      "product_response_subjective_score": 2.823529411764706
    },
    "product_id_precision_score": {
      "product_id_precision_score": 0.0
    },
    "earnings_response_subjective_score": {
      "earnings_response_subjective_score": 1.3
    },
    "earnings_precision_score": {
      "earnings_precision_score": 0.0
    },
    "icd11_response_subjective_score": {
      "icd11_response_subjective_score": 1.0
    },
    "icd11_precision_score": {
      "icd11_precision_score": 0.042105263157894736
    },
    "mmlu_flan_n_shot_generative_global_facts": {
      "exact_match,strict-match": 0.24,
      "exact_match_stderr,strict-match": 0.04292346959909281,
      "exact_match,flexible-extract": 0.29,
      "exact_match_stderr,flexible-extract": 0.045604802157206845,
      "alias": "mmlu_flan_n_shot_generative_global_facts"
    },
    "truthfulqa_gen": {
      "bleu_max,none": 6.531036466069969,
      "bleu_max_stderr,none": 0.9043289680312812,
      "bleu_acc,none": 0.46,
      "bleu_acc_stderr,none": 0.05009082659620332,
      "bleu_diff,none": -0.9323317213212583,
      "bleu_diff_stderr,none": 0.8067385601751962,
      "rouge1_max,none": 20.89456732926107,
      "rouge1_max_stderr,none": 1.4865748512002068,
      "rouge1_acc,none": 0.44,
      "rouge1_acc_stderr,none": 0.04988876515698589,
      "rouge1_diff,none": -0.9440419957596459,
      "rouge1_diff_stderr,none": 1.0937789188254103,
      "rouge2_max,none": 13.502001575762279,
      "rouge2_max_stderr,none": 1.3536063014199604,
      "rouge2_acc,none": 0.4,
      "rouge2_acc_stderr,none": 0.049236596391733084,
      "rouge2_diff,none": -1.2095201954204824,
      "rouge2_diff_stderr,none": 1.1259112814340064,
      "rougeL_max,none": 19.029126902563377,
      "rougeL_max_stderr,none": 1.4521123787780108,
      "rougeL_acc,none": 0.4,
      "rougeL_acc_stderr,none": 0.049236596391733084,
      "rougeL_diff,none": -0.9873337635962663,
      "rougeL_diff_stderr,none": 0.9428926541420342,
      "alias": "truthfulqa_gen"
    }
  }
}
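
The file above is a single JSON object: a "config" block recording the evaluation settings for the products-ft-llama2-7b-chat-hf run, and a "results" block mapping each task name to a dict of metric values (with matching "_stderr" entries where available). Below is a minimal Python sketch of reading and summarizing it using only the standard json module; the filename "results.json" is an assumed placeholder for wherever this file is stored locally.

import json

# Load the evaluation results; "results.json" is an assumed local path,
# not a name given in the source.
with open("results.json") as f:
    report = json.load(f)

# The "config" block records how the run was set up (model, dtype, limit, ...).
config = report["config"]
print(f"model: {config['model_name']} ({config['model_dtype']}), limit={config['limit']}")

# Each entry in "results" maps a task name to metric -> value pairs.
# Print only numeric metrics, skipping string fields such as "alias".
for task, metrics in report["results"].items():
    for metric, value in metrics.items():
        if isinstance(value, (int, float)):
            print(f"{task:45s} {metric:40s} {value:.4f}")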