{
  "best_metric": 0.8919460068914353,
  "best_model_checkpoint": "cls_comment-phobert-base-v2-v2.4.1/checkpoint-2300",
  "epoch": 26.794258373205743,
  "eval_steps": 100,
  "global_step": 2800,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.96,
      "grad_norm": 1.6857236623764038,
      "learning_rate": 2.5e-06,
      "loss": 1.7049,
      "step": 100
    },
    {
      "epoch": 0.96,
      "eval_accuracy": 0.46144614461446143,
      "eval_f1_score": 0.10801682151937281,
      "eval_loss": 1.4949835538864136,
      "eval_precision": 0.21007122078443174,
      "eval_recall": 0.16814650388457272,
      "eval_runtime": 6.4303,
      "eval_samples_per_second": 518.327,
      "eval_steps_per_second": 8.242,
      "step": 100
    },
    {
      "epoch": 1.91,
      "grad_norm": 2.8904168605804443,
      "learning_rate": 5e-06,
      "loss": 1.3066,
      "step": 200
    },
    {
      "epoch": 1.91,
      "eval_accuracy": 0.6597659765976598,
      "eval_f1_score": 0.24932737263473323,
      "eval_loss": 1.0450588464736938,
      "eval_precision": 0.21503708017040588,
      "eval_recall": 0.29703415957578,
      "eval_runtime": 6.2935,
      "eval_samples_per_second": 529.594,
      "eval_steps_per_second": 8.421,
      "step": 200
    },
    {
      "epoch": 2.87,
      "grad_norm": 4.716286659240723,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.9457,
      "step": 300
    },
    {
      "epoch": 2.87,
      "eval_accuracy": 0.7971797179717972,
      "eval_f1_score": 0.5218513395163904,
      "eval_loss": 0.7491273283958435,
      "eval_precision": 0.5237982031957714,
      "eval_recall": 0.5230368404187595,
      "eval_runtime": 6.3214,
      "eval_samples_per_second": 527.257,
      "eval_steps_per_second": 8.384,
      "step": 300
    },
    {
      "epoch": 3.83,
      "grad_norm": 4.255033016204834,
      "learning_rate": 1e-05,
      "loss": 0.6975,
      "step": 400
    },
    {
      "epoch": 3.83,
      "eval_accuracy": 0.8496849684968497,
      "eval_f1_score": 0.569984189542809,
      "eval_loss": 0.5573880672454834,
      "eval_precision": 0.7142864553492309,
      "eval_recall": 0.5934564957773306,
      "eval_runtime": 6.2462,
      "eval_samples_per_second": 533.607,
      "eval_steps_per_second": 8.485,
      "step": 400
    },
    {
      "epoch": 4.78,
      "grad_norm": 9.066823959350586,
      "learning_rate": 9.722222222222223e-06,
      "loss": 0.5187,
      "step": 500
    },
    {
      "epoch": 4.78,
      "eval_accuracy": 0.8664866486648665,
      "eval_f1_score": 0.6685229055473728,
      "eval_loss": 0.46813032031059265,
      "eval_precision": 0.7076541248010603,
      "eval_recall": 0.6591572058707156,
      "eval_runtime": 6.2485,
      "eval_samples_per_second": 533.408,
      "eval_steps_per_second": 8.482,
      "step": 500
    },
    {
      "epoch": 5.74,
      "grad_norm": 5.799560070037842,
      "learning_rate": 9.444444444444445e-06,
      "loss": 0.4183,
      "step": 600
    },
    {
      "epoch": 5.74,
      "eval_accuracy": 0.8820882088208821,
      "eval_f1_score": 0.774744299508991,
      "eval_loss": 0.4121437668800354,
      "eval_precision": 0.8760939894193666,
      "eval_recall": 0.7478297020405164,
      "eval_runtime": 6.289,
      "eval_samples_per_second": 529.974,
      "eval_steps_per_second": 8.427,
      "step": 600
    },
    {
      "epoch": 6.7,
      "grad_norm": 5.299034595489502,
      "learning_rate": 9.166666666666666e-06,
      "loss": 0.3323,
      "step": 700
    },
    {
      "epoch": 6.7,
      "eval_accuracy": 0.903990399039904,
      "eval_f1_score": 0.850497109426447,
      "eval_loss": 0.34878164529800415,
      "eval_precision": 0.8647331643827426,
      "eval_recall": 0.839118025290519,
      "eval_runtime": 6.1906,
      "eval_samples_per_second": 538.4,
      "eval_steps_per_second": 8.561,
      "step": 700
    },
    {
      "epoch": 7.66,
      "grad_norm": 5.325865268707275,
      "learning_rate": 8.888888888888888e-06,
      "loss": 0.2705,
      "step": 800
    },
    {
      "epoch": 7.66,
      "eval_accuracy": 0.9123912391239124,
      "eval_f1_score": 0.8679529123789608,
      "eval_loss": 0.3178523778915405,
      "eval_precision": 0.8683198068711735,
      "eval_recall": 0.8693736512924785,
      "eval_runtime": 6.2028,
      "eval_samples_per_second": 537.341,
      "eval_steps_per_second": 8.545,
      "step": 800
    },
    {
      "epoch": 8.61,
      "grad_norm": 4.130426406860352,
      "learning_rate": 8.611111111111112e-06,
      "loss": 0.229,
      "step": 900
    },
    {
      "epoch": 8.61,
      "eval_accuracy": 0.915991599159916,
      "eval_f1_score": 0.8739115802564847,
      "eval_loss": 0.3108988404273987,
      "eval_precision": 0.8704138816567074,
      "eval_recall": 0.8778451486716982,
      "eval_runtime": 6.2053,
      "eval_samples_per_second": 537.123,
      "eval_steps_per_second": 8.541,
      "step": 900
    },
    {
      "epoch": 9.57,
      "grad_norm": 4.939031600952148,
      "learning_rate": 8.333333333333334e-06,
      "loss": 0.1964,
      "step": 1000
    },
    {
      "epoch": 9.57,
      "eval_accuracy": 0.9174917491749175,
      "eval_f1_score": 0.8775661449676492,
      "eval_loss": 0.3028014600276947,
      "eval_precision": 0.8740691720600018,
      "eval_recall": 0.8812896711088255,
      "eval_runtime": 6.2156,
      "eval_samples_per_second": 536.232,
      "eval_steps_per_second": 8.527,
      "step": 1000
    },
    {
      "epoch": 10.53,
      "grad_norm": 7.8568878173828125,
      "learning_rate": 8.055555555555557e-06,
      "loss": 0.1771,
      "step": 1100
    },
    {
      "epoch": 10.53,
      "eval_accuracy": 0.918091809180918,
      "eval_f1_score": 0.8807318795078937,
      "eval_loss": 0.30322423577308655,
      "eval_precision": 0.8743317066593773,
      "eval_recall": 0.8877139802470034,
      "eval_runtime": 6.2526,
      "eval_samples_per_second": 533.058,
      "eval_steps_per_second": 8.476,
      "step": 1100
    },
    {
      "epoch": 11.48,
      "grad_norm": 8.988609313964844,
      "learning_rate": 7.77777777777778e-06,
      "loss": 0.1518,
      "step": 1200
    },
    {
      "epoch": 11.48,
      "eval_accuracy": 0.9165916591659166,
      "eval_f1_score": 0.8762082418137522,
      "eval_loss": 0.3150691092014313,
      "eval_precision": 0.8828474311875153,
      "eval_recall": 0.8702178521387077,
      "eval_runtime": 6.2783,
      "eval_samples_per_second": 530.878,
      "eval_steps_per_second": 8.442,
      "step": 1200
    },
    {
      "epoch": 12.44,
      "grad_norm": 6.535445690155029,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.1368,
      "step": 1300
    },
    {
      "epoch": 12.44,
      "eval_accuracy": 0.9213921392139214,
      "eval_f1_score": 0.8794257945528638,
      "eval_loss": 0.29376497864723206,
      "eval_precision": 0.8789472587468637,
      "eval_recall": 0.8800344348044677,
      "eval_runtime": 6.2765,
      "eval_samples_per_second": 531.025,
      "eval_steps_per_second": 8.444,
      "step": 1300
    },
    {
      "epoch": 13.4,
      "grad_norm": 6.200416564941406,
      "learning_rate": 7.222222222222223e-06,
      "loss": 0.1116,
      "step": 1400
    },
    {
      "epoch": 13.4,
      "eval_accuracy": 0.9204920492049204,
      "eval_f1_score": 0.8794630237866472,
      "eval_loss": 0.29709574580192566,
      "eval_precision": 0.8776282905981169,
      "eval_recall": 0.881459841742562,
      "eval_runtime": 6.1898,
      "eval_samples_per_second": 538.463,
      "eval_steps_per_second": 8.562,
      "step": 1400
    },
    {
      "epoch": 14.35,
      "grad_norm": 5.921875,
      "learning_rate": 6.944444444444445e-06,
      "loss": 0.1136,
      "step": 1500
    },
    {
      "epoch": 14.35,
      "eval_accuracy": 0.9234923492349235,
      "eval_f1_score": 0.8857825928642494,
      "eval_loss": 0.30112606287002563,
      "eval_precision": 0.8894251615389424,
      "eval_recall": 0.8825136450024287,
      "eval_runtime": 6.2125,
      "eval_samples_per_second": 536.5,
      "eval_steps_per_second": 8.531,
      "step": 1500
    },
    {
      "epoch": 15.31,
      "grad_norm": 13.020410537719727,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.094,
      "step": 1600
    },
    {
      "epoch": 15.31,
      "eval_accuracy": 0.9267926792679267,
      "eval_f1_score": 0.8891204106050078,
      "eval_loss": 0.2937377691268921,
      "eval_precision": 0.8855321931614877,
      "eval_recall": 0.8933184630720276,
      "eval_runtime": 6.2824,
      "eval_samples_per_second": 530.532,
      "eval_steps_per_second": 8.436,
      "step": 1600
    },
    {
      "epoch": 16.27,
      "grad_norm": 6.572815418243408,
      "learning_rate": 6.3888888888888885e-06,
      "loss": 0.0905,
      "step": 1700
    },
    {
      "epoch": 16.27,
      "eval_accuracy": 0.9264926492649265,
      "eval_f1_score": 0.8849818091608542,
      "eval_loss": 0.30491721630096436,
      "eval_precision": 0.8885868021143595,
      "eval_recall": 0.8819482912765212,
      "eval_runtime": 6.1862,
      "eval_samples_per_second": 538.781,
      "eval_steps_per_second": 8.567,
      "step": 1700
    },
    {
      "epoch": 17.22,
      "grad_norm": 11.92620849609375,
      "learning_rate": 6.111111111111112e-06,
      "loss": 0.0838,
      "step": 1800
    },
    {
      "epoch": 17.22,
      "eval_accuracy": 0.9243924392439244,
      "eval_f1_score": 0.8823178893108493,
      "eval_loss": 0.30605384707450867,
      "eval_precision": 0.8783548672468386,
      "eval_recall": 0.8869437658317699,
      "eval_runtime": 6.213,
      "eval_samples_per_second": 536.453,
      "eval_steps_per_second": 8.53,
      "step": 1800
    },
    {
      "epoch": 18.18,
      "grad_norm": 6.095840930938721,
      "learning_rate": 5.833333333333334e-06,
      "loss": 0.0749,
      "step": 1900
    },
    {
      "epoch": 18.18,
      "eval_accuracy": 0.9204920492049204,
      "eval_f1_score": 0.8771352609895953,
      "eval_loss": 0.3274901509284973,
      "eval_precision": 0.8717396476070715,
      "eval_recall": 0.883927539576653,
      "eval_runtime": 6.221,
      "eval_samples_per_second": 535.769,
      "eval_steps_per_second": 8.52,
      "step": 1900
    },
    {
      "epoch": 19.14,
      "grad_norm": 6.83023738861084,
      "learning_rate": 5.555555555555557e-06,
      "loss": 0.0686,
      "step": 2000
    },
    {
      "epoch": 19.14,
      "eval_accuracy": 0.9294929492949295,
      "eval_f1_score": 0.8915069438815207,
      "eval_loss": 0.3092004358768463,
      "eval_precision": 0.8845623861216612,
      "eval_recall": 0.8989552312613528,
      "eval_runtime": 6.2372,
      "eval_samples_per_second": 534.375,
      "eval_steps_per_second": 8.497,
      "step": 2000
    },
    {
      "epoch": 20.1,
      "grad_norm": 4.03262186050415,
      "learning_rate": 5.2777777777777785e-06,
      "loss": 0.0669,
      "step": 2100
    },
    {
      "epoch": 20.1,
      "eval_accuracy": 0.924992499249925,
      "eval_f1_score": 0.8835986677013299,
      "eval_loss": 0.3168080151081085,
      "eval_precision": 0.8824571821499484,
      "eval_recall": 0.8848697486050657,
      "eval_runtime": 6.2215,
      "eval_samples_per_second": 535.72,
      "eval_steps_per_second": 8.519,
      "step": 2100
    },
    {
      "epoch": 21.05,
      "grad_norm": 1.4791430234909058,
      "learning_rate": 5e-06,
      "loss": 0.0582,
      "step": 2200
    },
    {
      "epoch": 21.05,
      "eval_accuracy": 0.9234923492349235,
      "eval_f1_score": 0.8762756882804612,
      "eval_loss": 0.333869606256485,
      "eval_precision": 0.8630877156327875,
      "eval_recall": 0.8925977028354047,
      "eval_runtime": 6.2137,
      "eval_samples_per_second": 536.399,
      "eval_steps_per_second": 8.53,
      "step": 2200
    },
    {
      "epoch": 22.01,
      "grad_norm": 1.3378651142120361,
      "learning_rate": 4.722222222222222e-06,
      "loss": 0.0516,
      "step": 2300
    },
    {
      "epoch": 22.01,
      "eval_accuracy": 0.9267926792679267,
      "eval_f1_score": 0.8919460068914353,
      "eval_loss": 0.3273985981941223,
      "eval_precision": 0.8896937401913322,
      "eval_recall": 0.8944103789209331,
      "eval_runtime": 6.2539,
      "eval_samples_per_second": 532.947,
      "eval_steps_per_second": 8.475,
      "step": 2300
    },
    {
      "epoch": 22.97,
      "grad_norm": 2.3011834621429443,
      "learning_rate": 4.444444444444444e-06,
      "loss": 0.0543,
      "step": 2400
    },
    {
      "epoch": 22.97,
      "eval_accuracy": 0.9294929492949295,
      "eval_f1_score": 0.8912698081733191,
      "eval_loss": 0.32296448945999146,
      "eval_precision": 0.8946222329092709,
      "eval_recall": 0.8882296547805383,
      "eval_runtime": 6.2569,
      "eval_samples_per_second": 532.688,
      "eval_steps_per_second": 8.471,
      "step": 2400
    },
    {
      "epoch": 23.92,
      "grad_norm": 0.30257853865623474,
      "learning_rate": 4.166666666666667e-06,
      "loss": 0.0435,
      "step": 2500
    },
    {
      "epoch": 23.92,
      "eval_accuracy": 0.9252925292529253,
      "eval_f1_score": 0.8806373103770997,
      "eval_loss": 0.3364492952823639,
      "eval_precision": 0.8917652898214161,
      "eval_recall": 0.8705348522415428,
      "eval_runtime": 6.2476,
      "eval_samples_per_second": 533.488,
      "eval_steps_per_second": 8.483,
      "step": 2500
    },
    {
      "epoch": 24.88,
      "grad_norm": 4.352519512176514,
      "learning_rate": 3.88888888888889e-06,
      "loss": 0.0405,
      "step": 2600
    },
    {
      "epoch": 24.88,
      "eval_accuracy": 0.9240924092409241,
      "eval_f1_score": 0.8816233559879275,
      "eval_loss": 0.34915250539779663,
      "eval_precision": 0.8818560471877731,
      "eval_recall": 0.8820867223989518,
      "eval_runtime": 6.2343,
      "eval_samples_per_second": 534.62,
      "eval_steps_per_second": 8.501,
      "step": 2600
    },
    {
      "epoch": 25.84,
      "grad_norm": 1.6296316385269165,
      "learning_rate": 3.6111111111111115e-06,
      "loss": 0.0398,
      "step": 2700
    },
    {
      "epoch": 25.84,
      "eval_accuracy": 0.9237923792379238,
      "eval_f1_score": 0.8799317994806395,
      "eval_loss": 0.3558264374732971,
      "eval_precision": 0.8807466782442214,
      "eval_recall": 0.879644875034168,
      "eval_runtime": 6.2137,
      "eval_samples_per_second": 536.396,
      "eval_steps_per_second": 8.53,
      "step": 2700
    },
    {
      "epoch": 26.79,
      "grad_norm": 0.5989682078361511,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 0.0363,
      "step": 2800
    },
    {
      "epoch": 26.79,
      "eval_accuracy": 0.9222922292229223,
      "eval_f1_score": 0.8741787015976087,
      "eval_loss": 0.36046338081359863,
      "eval_precision": 0.8698137895606535,
      "eval_recall": 0.8795311139961707,
      "eval_runtime": 6.2828,
      "eval_samples_per_second": 530.494,
      "eval_steps_per_second": 8.436,
      "step": 2800
    },
    {
      "epoch": 26.79,
      "step": 2800,
      "total_flos": 6555293809710912.0,
      "train_loss": 0.28978255237851824,
      "train_runtime": 2707.5005,
      "train_samples_per_second": 189.104,
      "train_steps_per_second": 1.477
    }
  ],
  "logging_steps": 100,
  "max_steps": 4000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 39,
  "save_steps": 100,
  "total_flos": 6555293809710912.0,
  "train_batch_size": 64,
  "trial_name": null,
  "trial_params": null
}