{
"epoch": 200.0,
"eval_accuracy": 77.03703703703704,
"eval_average_metrics": 77.53590008051296,
"eval_classification_report": "{\"acceptance\": {\"precision\": 1.0, \"recall\": 0.8888888888888888, \"f1-score\": 0.9411764705882353, \"support\": 9.0}, \"apology\": {\"precision\": 1.0, \"recall\": 0.5, \"f1-score\": 0.6666666666666666, \"support\": 2.0}, \"appreciation\": {\"precision\": 0.7692307692307693, \"recall\": 0.9090909090909091, \"f1-score\": 0.8333333333333333, \"support\": 11.0}, \"challenge\": {\"precision\": 0.7105263157894737, \"recall\": 0.75, \"f1-score\": 0.7297297297297298, \"support\": 36.0}, \"informing statement\": {\"precision\": 0.7941176470588235, \"recall\": 0.6923076923076923, \"f1-score\": 0.7397260273972601, \"support\": 39.0}, \"question\": {\"precision\": 0.7777777777777778, \"recall\": 0.7777777777777778, \"f1-score\": 0.7777777777777778, \"support\": 9.0}, \"rejection\": {\"precision\": 1.0, \"recall\": 0.8461538461538461, \"f1-score\": 0.9166666666666666, \"support\": 13.0}, \"request\": {\"precision\": 0.6190476190476191, \"recall\": 0.8125, \"f1-score\": 0.7027027027027026, \"support\": 16.0}, \"accuracy\": 0.7703703703703704, \"macro avg\": {\"precision\": 0.833837516113058, \"recall\": 0.7720898892773893, \"f1-score\": 0.7884724218577965, \"support\": 135.0}, \"weighted avg\": {\"precision\": 0.784561747926041, \"recall\": 0.7703703703703704, \"f1-score\": 0.7722228406219811, \"support\": 135.0}}",
"eval_f1_macro": 78.84724218577965,
"eval_f1_micro": 77.03703703703704,
"eval_f1_weighted": 77.22228406219811,
"eval_loss": 1.0223504304885864,
"eval_runtime": 1.7125,
"eval_samples_per_second": 78.832,
"init_mem_cpu_alloc_delta": -543014912,
"init_mem_cpu_peaked_delta": 543023104,
"init_mem_gpu_alloc_delta": 891528192,
"init_mem_gpu_peaked_delta": 0,
"peak_memory": 5.305544921875,
"test_accuracy": 77.03703703703704,
"test_average_metrics": 77.53590008051296,
"test_classification_report": "{\"acceptance\": {\"precision\": 1.0, \"recall\": 0.8888888888888888, \"f1-score\": 0.9411764705882353, \"support\": 9.0}, \"apology\": {\"precision\": 1.0, \"recall\": 0.5, \"f1-score\": 0.6666666666666666, \"support\": 2.0}, \"appreciation\": {\"precision\": 0.7692307692307693, \"recall\": 0.9090909090909091, \"f1-score\": 0.8333333333333333, \"support\": 11.0}, \"challenge\": {\"precision\": 0.7105263157894737, \"recall\": 0.75, \"f1-score\": 0.7297297297297298, \"support\": 36.0}, \"informing statement\": {\"precision\": 0.7941176470588235, \"recall\": 0.6923076923076923, \"f1-score\": 0.7397260273972601, \"support\": 39.0}, \"question\": {\"precision\": 0.7777777777777778, \"recall\": 0.7777777777777778, \"f1-score\": 0.7777777777777778, \"support\": 9.0}, \"rejection\": {\"precision\": 1.0, \"recall\": 0.8461538461538461, \"f1-score\": 0.9166666666666666, \"support\": 13.0}, \"request\": {\"precision\": 0.6190476190476191, \"recall\": 0.8125, \"f1-score\": 0.7027027027027026, \"support\": 16.0}, \"accuracy\": 0.7703703703703704, \"macro avg\": {\"precision\": 0.833837516113058, \"recall\": 0.7720898892773893, \"f1-score\": 0.7884724218577965, \"support\": 135.0}, \"weighted avg\": {\"precision\": 0.784561747926041, \"recall\": 0.7703703703703704, \"f1-score\": 0.7722228406219811, \"support\": 135.0}}",
"test_f1_macro": 78.84724218577965,
"test_f1_micro": 77.03703703703704,
"test_f1_weighted": 77.22228406219811,
"test_loss": 1.0223504304885864,
"test_runtime": 1.6896,
"test_samples_per_second": 79.902,
"total_time_in_minutes": 37.44455833333333,
"train_mem_cpu_alloc_delta": 1023647744,
"train_mem_cpu_peaked_delta": 32768,
"train_mem_gpu_alloc_delta": 40251392,
"train_mem_gpu_peaked_delta": 4624237568,
"train_runtime": 2245.6861,
"train_samples": 541,
"train_samples_per_second": 1.514
}