|
{ |
|
"best_metric": 0.998085740410612, |
|
"best_model_checkpoint": "/content/drive/MyDrive/GolemPII/GolemPII-xlm-roberta-v1/checkpoint-7615", |
|
"epoch": 5.0, |
|
"eval_steps": 500, |
|
"global_step": 7615, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.03282994090610637, |
|
"grad_norm": 10.239672660827637, |
|
"learning_rate": 6.036745406824147e-06, |
|
"loss": 3.0294, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.06565988181221274, |
|
"grad_norm": 1.969146966934204, |
|
"learning_rate": 1.2598425196850394e-05, |
|
"loss": 1.5238, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.0984898227183191, |
|
"grad_norm": 0.7575042843818665, |
|
"learning_rate": 1.916010498687664e-05, |
|
"loss": 0.2974, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.1313197636244255, |
|
"grad_norm": 0.6243991851806641, |
|
"learning_rate": 2.572178477690289e-05, |
|
"loss": 0.0568, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.16414970453053185, |
|
"grad_norm": 0.35771840810775757, |
|
"learning_rate": 3.228346456692913e-05, |
|
"loss": 0.031, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.1969796454366382, |
|
"grad_norm": 0.7596266865730286, |
|
"learning_rate": 3.8845144356955383e-05, |
|
"loss": 0.0205, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.2298095863427446, |
|
"grad_norm": 0.4744904339313507, |
|
"learning_rate": 4.540682414698163e-05, |
|
"loss": 0.0189, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.262639527248851, |
|
"grad_norm": 1.39719557762146, |
|
"learning_rate": 4.989632291954659e-05, |
|
"loss": 0.0138, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.29546946815495734, |
|
"grad_norm": 0.29437175393104553, |
|
"learning_rate": 4.955073265136854e-05, |
|
"loss": 0.0125, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.3282994090610637, |
|
"grad_norm": 0.19857335090637207, |
|
"learning_rate": 4.920514238319049e-05, |
|
"loss": 0.0107, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.36112934996717005, |
|
"grad_norm": 0.34563446044921875, |
|
"learning_rate": 4.885955211501244e-05, |
|
"loss": 0.0106, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.3939592908732764, |
|
"grad_norm": 0.3963533639907837, |
|
"learning_rate": 4.851396184683439e-05, |
|
"loss": 0.0122, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.42678923177938277, |
|
"grad_norm": 0.37360525131225586, |
|
"learning_rate": 4.816837157865634e-05, |
|
"loss": 0.009, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.4596191726854892, |
|
"grad_norm": 0.2154053896665573, |
|
"learning_rate": 4.78227813104783e-05, |
|
"loss": 0.0083, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.49244911359159554, |
|
"grad_norm": 0.33652621507644653, |
|
"learning_rate": 4.747719104230025e-05, |
|
"loss": 0.0078, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.525279054497702, |
|
"grad_norm": 0.5589196085929871, |
|
"learning_rate": 4.71316007741222e-05, |
|
"loss": 0.0088, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.5581089954038083, |
|
"grad_norm": 0.33235111832618713, |
|
"learning_rate": 4.678601050594415e-05, |
|
"loss": 0.0064, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.5909389363099147, |
|
"grad_norm": 0.36602529883384705, |
|
"learning_rate": 4.644042023776611e-05, |
|
"loss": 0.0068, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.623768877216021, |
|
"grad_norm": 1.011355996131897, |
|
"learning_rate": 4.609482996958806e-05, |
|
"loss": 0.0059, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.6565988181221274, |
|
"grad_norm": 0.5964003205299377, |
|
"learning_rate": 4.574923970141001e-05, |
|
"loss": 0.0051, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.6894287590282338, |
|
"grad_norm": 0.21312755346298218, |
|
"learning_rate": 4.540364943323196e-05, |
|
"loss": 0.0058, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.7222586999343401, |
|
"grad_norm": 0.19403104484081268, |
|
"learning_rate": 4.5058059165053913e-05, |
|
"loss": 0.0058, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.7550886408404465, |
|
"grad_norm": 0.15370243787765503, |
|
"learning_rate": 4.4712468896875864e-05, |
|
"loss": 0.0051, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.7879185817465528, |
|
"grad_norm": 0.3946862518787384, |
|
"learning_rate": 4.4366878628697815e-05, |
|
"loss": 0.0054, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.8207485226526592, |
|
"grad_norm": 0.23363596200942993, |
|
"learning_rate": 4.402128836051977e-05, |
|
"loss": 0.0048, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.8535784635587655, |
|
"grad_norm": 0.25897666811943054, |
|
"learning_rate": 4.367569809234172e-05, |
|
"loss": 0.007, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.886408404464872, |
|
"grad_norm": 0.15858227014541626, |
|
"learning_rate": 4.3330107824163674e-05, |
|
"loss": 0.0042, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.9192383453709784, |
|
"grad_norm": 0.13322387635707855, |
|
"learning_rate": 4.2984517555985625e-05, |
|
"loss": 0.005, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.9520682862770847, |
|
"grad_norm": 0.16873428225517273, |
|
"learning_rate": 4.2638927287807575e-05, |
|
"loss": 0.0046, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.9848982271831911, |
|
"grad_norm": 0.18423955142498016, |
|
"learning_rate": 4.229333701962953e-05, |
|
"loss": 0.0058, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_accuracy": 0.9993276594535457, |
|
"eval_f1": 0.9933933815429036, |
|
"eval_loss": 0.0024869011249393225, |
|
"eval_precision": 0.9931092525380043, |
|
"eval_recall": 0.9936776731732028, |
|
"eval_runtime": 76.0097, |
|
"eval_samples_per_second": 236.812, |
|
"eval_steps_per_second": 3.71, |
|
"step": 1523 |
|
}, |
|
{ |
|
"epoch": 1.0177281680892973, |
|
"grad_norm": 0.29311779141426086, |
|
"learning_rate": 4.1947746751451484e-05, |
|
"loss": 0.0041, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 1.050558108995404, |
|
"grad_norm": 0.10629381239414215, |
|
"learning_rate": 4.1602156483273434e-05, |
|
"loss": 0.0037, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 1.0833880499015103, |
|
"grad_norm": 0.18275995552539825, |
|
"learning_rate": 4.1256566215095385e-05, |
|
"loss": 0.0043, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 1.1162179908076166, |
|
"grad_norm": 0.06012125313282013, |
|
"learning_rate": 4.0910975946917336e-05, |
|
"loss": 0.0062, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 1.149047931713723, |
|
"grad_norm": 7.185174942016602, |
|
"learning_rate": 4.0565385678739286e-05, |
|
"loss": 0.0085, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 1.1818778726198294, |
|
"grad_norm": 0.07045840471982956, |
|
"learning_rate": 4.021979541056124e-05, |
|
"loss": 0.0031, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 1.2147078135259357, |
|
"grad_norm": 0.23435795307159424, |
|
"learning_rate": 3.9874205142383195e-05, |
|
"loss": 0.0026, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 1.247537754432042, |
|
"grad_norm": 0.046855829656124115, |
|
"learning_rate": 3.9528614874205145e-05, |
|
"loss": 0.0031, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 1.2803676953381484, |
|
"grad_norm": 0.3116040527820587, |
|
"learning_rate": 3.9183024606027096e-05, |
|
"loss": 0.0032, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 1.3131976362442548, |
|
"grad_norm": 0.10148299485445023, |
|
"learning_rate": 3.883743433784905e-05, |
|
"loss": 0.0026, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 1.3460275771503611, |
|
"grad_norm": 0.06928931921720505, |
|
"learning_rate": 3.8491844069671004e-05, |
|
"loss": 0.0028, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 1.3788575180564675, |
|
"grad_norm": 0.09313840419054031, |
|
"learning_rate": 3.8146253801492955e-05, |
|
"loss": 0.0027, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 1.4116874589625739, |
|
"grad_norm": 0.28150513768196106, |
|
"learning_rate": 3.7800663533314906e-05, |
|
"loss": 0.0032, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 1.4445173998686802, |
|
"grad_norm": 0.4722907841205597, |
|
"learning_rate": 3.745507326513685e-05, |
|
"loss": 0.0037, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 1.4773473407747866, |
|
"grad_norm": 0.5664809346199036, |
|
"learning_rate": 3.710948299695881e-05, |
|
"loss": 0.0026, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 1.510177281680893, |
|
"grad_norm": 0.03828972578048706, |
|
"learning_rate": 3.676389272878076e-05, |
|
"loss": 0.0022, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 1.5430072225869993, |
|
"grad_norm": 0.5701923370361328, |
|
"learning_rate": 3.641830246060271e-05, |
|
"loss": 0.0022, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 1.5758371634931057, |
|
"grad_norm": 0.15145182609558105, |
|
"learning_rate": 3.607271219242466e-05, |
|
"loss": 0.0029, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 1.608667104399212, |
|
"grad_norm": 0.055488649755716324, |
|
"learning_rate": 3.572712192424662e-05, |
|
"loss": 0.002, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 1.6414970453053184, |
|
"grad_norm": 0.2535063624382019, |
|
"learning_rate": 3.538153165606857e-05, |
|
"loss": 0.0024, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 1.6743269862114247, |
|
"grad_norm": 0.09548994898796082, |
|
"learning_rate": 3.503594138789052e-05, |
|
"loss": 0.0024, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 1.707156927117531, |
|
"grad_norm": 0.04930610582232475, |
|
"learning_rate": 3.469035111971247e-05, |
|
"loss": 0.0021, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 1.7399868680236374, |
|
"grad_norm": 0.12071087211370468, |
|
"learning_rate": 3.434476085153443e-05, |
|
"loss": 0.0023, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 1.7728168089297438, |
|
"grad_norm": 0.038167692720890045, |
|
"learning_rate": 3.399917058335638e-05, |
|
"loss": 0.0027, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 1.8056467498358502, |
|
"grad_norm": 0.016604498028755188, |
|
"learning_rate": 3.365358031517832e-05, |
|
"loss": 0.0025, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 1.8384766907419565, |
|
"grad_norm": 0.10711315274238586, |
|
"learning_rate": 3.330799004700028e-05, |
|
"loss": 0.0021, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 1.8713066316480629, |
|
"grad_norm": 0.10547716170549393, |
|
"learning_rate": 3.296239977882223e-05, |
|
"loss": 0.0029, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 1.9041365725541692, |
|
"grad_norm": 0.12901557981967926, |
|
"learning_rate": 3.261680951064418e-05, |
|
"loss": 0.0026, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 1.9369665134602758, |
|
"grad_norm": 0.16034963726997375, |
|
"learning_rate": 3.227121924246613e-05, |
|
"loss": 0.0031, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 1.9697964543663822, |
|
"grad_norm": 0.060961514711380005, |
|
"learning_rate": 3.192562897428809e-05, |
|
"loss": 0.0017, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_accuracy": 0.9995754536025347, |
|
"eval_f1": 0.995708295650744, |
|
"eval_loss": 0.0013849161332473159, |
|
"eval_precision": 0.9954693109933322, |
|
"eval_recall": 0.9959473950829271, |
|
"eval_runtime": 75.6987, |
|
"eval_samples_per_second": 237.785, |
|
"eval_steps_per_second": 3.725, |
|
"step": 3046 |
|
}, |
|
{ |
|
"epoch": 2.0026263952724883, |
|
"grad_norm": 0.08297313749790192, |
|
"learning_rate": 3.158003870611004e-05, |
|
"loss": 0.0026, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 2.0354563361785947, |
|
"grad_norm": 0.19145315885543823, |
|
"learning_rate": 3.123444843793199e-05, |
|
"loss": 0.0014, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 2.0682862770847015, |
|
"grad_norm": 0.08754308521747589, |
|
"learning_rate": 3.088885816975394e-05, |
|
"loss": 0.0014, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 2.101116217990808, |
|
"grad_norm": 0.1580217033624649, |
|
"learning_rate": 3.054326790157589e-05, |
|
"loss": 0.0016, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 2.133946158896914, |
|
"grad_norm": 0.022689586505293846, |
|
"learning_rate": 3.0197677633397846e-05, |
|
"loss": 0.0016, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 2.1667760998030205, |
|
"grad_norm": 0.10402818024158478, |
|
"learning_rate": 2.9852087365219793e-05, |
|
"loss": 0.0015, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 2.199606040709127, |
|
"grad_norm": 0.11667637526988983, |
|
"learning_rate": 2.9506497097041747e-05, |
|
"loss": 0.002, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 2.2324359816152333, |
|
"grad_norm": 0.24381309747695923, |
|
"learning_rate": 2.9160906828863698e-05, |
|
"loss": 0.0039, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 2.2652659225213396, |
|
"grad_norm": 0.03473767638206482, |
|
"learning_rate": 2.8815316560685652e-05, |
|
"loss": 0.0016, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 2.298095863427446, |
|
"grad_norm": 1.3230725526809692, |
|
"learning_rate": 2.8469726292507603e-05, |
|
"loss": 0.0016, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 2.3309258043335523, |
|
"grad_norm": 0.1341700553894043, |
|
"learning_rate": 2.8124136024329557e-05, |
|
"loss": 0.0047, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 2.3637557452396587, |
|
"grad_norm": 0.13513527810573578, |
|
"learning_rate": 2.7778545756151508e-05, |
|
"loss": 0.0018, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 2.396585686145765, |
|
"grad_norm": 0.0577649362385273, |
|
"learning_rate": 2.743295548797346e-05, |
|
"loss": 0.0014, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 2.4294156270518714, |
|
"grad_norm": 0.1941065490245819, |
|
"learning_rate": 2.7087365219795412e-05, |
|
"loss": 0.0011, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 2.4622455679579778, |
|
"grad_norm": 0.15985670685768127, |
|
"learning_rate": 2.6741774951617367e-05, |
|
"loss": 0.0016, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 2.495075508864084, |
|
"grad_norm": 0.08614476025104523, |
|
"learning_rate": 2.6396184683439317e-05, |
|
"loss": 0.0016, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 2.5279054497701905, |
|
"grad_norm": 0.15596601366996765, |
|
"learning_rate": 2.605059441526127e-05, |
|
"loss": 0.0016, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 2.560735390676297, |
|
"grad_norm": 0.12409207224845886, |
|
"learning_rate": 2.5705004147083215e-05, |
|
"loss": 0.0015, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 2.593565331582403, |
|
"grad_norm": 0.189951092004776, |
|
"learning_rate": 2.535941387890517e-05, |
|
"loss": 0.0015, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 2.6263952724885096, |
|
"grad_norm": 0.059304554015398026, |
|
"learning_rate": 2.501382361072712e-05, |
|
"loss": 0.0014, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 2.659225213394616, |
|
"grad_norm": 0.24233925342559814, |
|
"learning_rate": 2.4668233342549074e-05, |
|
"loss": 0.0013, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 2.6920551543007223, |
|
"grad_norm": 0.2739645838737488, |
|
"learning_rate": 2.4322643074371025e-05, |
|
"loss": 0.0012, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 2.7248850952068286, |
|
"grad_norm": 0.11855873465538025, |
|
"learning_rate": 2.397705280619298e-05, |
|
"loss": 0.0019, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 2.757715036112935, |
|
"grad_norm": 0.05818852037191391, |
|
"learning_rate": 2.363146253801493e-05, |
|
"loss": 0.0027, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 2.7905449770190414, |
|
"grad_norm": 0.06046278029680252, |
|
"learning_rate": 2.3285872269836884e-05, |
|
"loss": 0.0014, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 2.8233749179251477, |
|
"grad_norm": 0.04216454550623894, |
|
"learning_rate": 2.2940282001658835e-05, |
|
"loss": 0.0011, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 2.856204858831254, |
|
"grad_norm": 0.0564955435693264, |
|
"learning_rate": 2.2594691733480785e-05, |
|
"loss": 0.0015, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 2.8890347997373604, |
|
"grad_norm": 0.10374019294977188, |
|
"learning_rate": 2.224910146530274e-05, |
|
"loss": 0.0013, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 2.921864740643467, |
|
"grad_norm": 0.2742884159088135, |
|
"learning_rate": 2.190351119712469e-05, |
|
"loss": 0.0014, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 2.954694681549573, |
|
"grad_norm": 0.02845386229455471, |
|
"learning_rate": 2.155792092894664e-05, |
|
"loss": 0.0013, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 2.9875246224556795, |
|
"grad_norm": 0.10036207735538483, |
|
"learning_rate": 2.1212330660768595e-05, |
|
"loss": 0.0012, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_accuracy": 0.9997391341413165, |
|
"eval_f1": 0.9973228263586491, |
|
"eval_loss": 0.0009462219895794988, |
|
"eval_precision": 0.9971588480180992, |
|
"eval_recall": 0.9974868586390879, |
|
"eval_runtime": 75.644, |
|
"eval_samples_per_second": 237.957, |
|
"eval_steps_per_second": 3.728, |
|
"step": 4569 |
|
}, |
|
{ |
|
"epoch": 3.020354563361786, |
|
"grad_norm": 0.08079813420772552, |
|
"learning_rate": 2.0866740392590546e-05, |
|
"loss": 0.0011, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 3.0531845042678922, |
|
"grad_norm": 0.09932583570480347, |
|
"learning_rate": 2.0521150124412497e-05, |
|
"loss": 0.0023, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 3.0860144451739986, |
|
"grad_norm": 0.05140871927142143, |
|
"learning_rate": 2.017555985623445e-05, |
|
"loss": 0.001, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 3.118844386080105, |
|
"grad_norm": 0.19095173478126526, |
|
"learning_rate": 1.98299695880564e-05, |
|
"loss": 0.001, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 3.1516743269862113, |
|
"grad_norm": 0.041814982891082764, |
|
"learning_rate": 1.9484379319878356e-05, |
|
"loss": 0.0018, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 3.1845042678923177, |
|
"grad_norm": 0.025649981573224068, |
|
"learning_rate": 1.9138789051700303e-05, |
|
"loss": 0.0008, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 3.217334208798424, |
|
"grad_norm": 0.12662899494171143, |
|
"learning_rate": 1.8793198783522257e-05, |
|
"loss": 0.0009, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 3.2501641497045304, |
|
"grad_norm": 0.10787362605333328, |
|
"learning_rate": 1.8447608515344208e-05, |
|
"loss": 0.0008, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 3.2829940906106367, |
|
"grad_norm": 0.03877755254507065, |
|
"learning_rate": 1.8102018247166162e-05, |
|
"loss": 0.0008, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 3.315824031516743, |
|
"grad_norm": 0.11100368201732635, |
|
"learning_rate": 1.7756427978988113e-05, |
|
"loss": 0.0007, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 3.3486539724228495, |
|
"grad_norm": 0.027431080117821693, |
|
"learning_rate": 1.7410837710810067e-05, |
|
"loss": 0.0008, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 3.381483913328956, |
|
"grad_norm": 0.013812090270221233, |
|
"learning_rate": 1.7065247442632014e-05, |
|
"loss": 0.0008, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 3.414313854235062, |
|
"grad_norm": 0.1110658347606659, |
|
"learning_rate": 1.6719657174453968e-05, |
|
"loss": 0.0022, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 3.4471437951411685, |
|
"grad_norm": 0.11397113651037216, |
|
"learning_rate": 1.637406690627592e-05, |
|
"loss": 0.0009, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 3.479973736047275, |
|
"grad_norm": 0.2407987117767334, |
|
"learning_rate": 1.6028476638097873e-05, |
|
"loss": 0.0008, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 3.5128036769533812, |
|
"grad_norm": 0.06540926545858383, |
|
"learning_rate": 1.5682886369919824e-05, |
|
"loss": 0.001, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 3.5456336178594876, |
|
"grad_norm": 0.08520480245351791, |
|
"learning_rate": 1.5337296101741775e-05, |
|
"loss": 0.0007, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 3.578463558765594, |
|
"grad_norm": 0.091216079890728, |
|
"learning_rate": 1.4991705833563727e-05, |
|
"loss": 0.0011, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 3.6112934996717003, |
|
"grad_norm": 0.1052037924528122, |
|
"learning_rate": 1.464611556538568e-05, |
|
"loss": 0.0008, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 3.6441234405778067, |
|
"grad_norm": 0.30819040536880493, |
|
"learning_rate": 1.4300525297207632e-05, |
|
"loss": 0.0007, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 3.6769533814839135, |
|
"grad_norm": 0.06232546642422676, |
|
"learning_rate": 1.3954935029029584e-05, |
|
"loss": 0.0005, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 3.70978332239002, |
|
"grad_norm": 0.08547823131084442, |
|
"learning_rate": 1.3609344760851537e-05, |
|
"loss": 0.0006, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 3.742613263296126, |
|
"grad_norm": 0.12835901975631714, |
|
"learning_rate": 1.3263754492673486e-05, |
|
"loss": 0.0028, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 3.7754432042022326, |
|
"grad_norm": 0.07907555252313614, |
|
"learning_rate": 1.2918164224495438e-05, |
|
"loss": 0.001, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 3.808273145108339, |
|
"grad_norm": 0.03760789334774017, |
|
"learning_rate": 1.257257395631739e-05, |
|
"loss": 0.0008, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 3.8411030860144453, |
|
"grad_norm": 0.008074942976236343, |
|
"learning_rate": 1.2226983688139343e-05, |
|
"loss": 0.0007, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 3.8739330269205516, |
|
"grad_norm": 0.039751049131155014, |
|
"learning_rate": 1.1881393419961294e-05, |
|
"loss": 0.0008, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 3.906762967826658, |
|
"grad_norm": 0.1129092127084732, |
|
"learning_rate": 1.1535803151783246e-05, |
|
"loss": 0.0009, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 3.9395929087327644, |
|
"grad_norm": 0.030842676758766174, |
|
"learning_rate": 1.1190212883605199e-05, |
|
"loss": 0.0006, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 3.9724228496388707, |
|
"grad_norm": 0.24821631610393524, |
|
"learning_rate": 1.084462261542715e-05, |
|
"loss": 0.0009, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_accuracy": 0.9997499325101945, |
|
"eval_f1": 0.9977469995625562, |
|
"eval_loss": 0.0008962151478044689, |
|
"eval_precision": 0.9976255936015996, |
|
"eval_recall": 0.997868435076085, |
|
"eval_runtime": 75.9398, |
|
"eval_samples_per_second": 237.03, |
|
"eval_steps_per_second": 3.713, |
|
"step": 6092 |
|
}, |
|
{ |
|
"epoch": 4.005252790544977, |
|
"grad_norm": 0.13012520968914032, |
|
"learning_rate": 1.0499032347249102e-05, |
|
"loss": 0.0007, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 4.038082731451083, |
|
"grad_norm": 0.008424765430390835, |
|
"learning_rate": 1.0153442079071054e-05, |
|
"loss": 0.0005, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 4.070912672357189, |
|
"grad_norm": 0.08021287620067596, |
|
"learning_rate": 9.807851810893005e-06, |
|
"loss": 0.0005, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 4.103742613263296, |
|
"grad_norm": 0.015998413786292076, |
|
"learning_rate": 9.462261542714957e-06, |
|
"loss": 0.0003, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 4.136572554169403, |
|
"grad_norm": 0.09457238763570786, |
|
"learning_rate": 9.11667127453691e-06, |
|
"loss": 0.0004, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 4.169402495075509, |
|
"grad_norm": 0.04868745431303978, |
|
"learning_rate": 8.77108100635886e-06, |
|
"loss": 0.0008, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 4.202232435981616, |
|
"grad_norm": 0.033117685467004776, |
|
"learning_rate": 8.425490738180813e-06, |
|
"loss": 0.0004, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 4.235062376887722, |
|
"grad_norm": 0.03128530830144882, |
|
"learning_rate": 8.079900470002765e-06, |
|
"loss": 0.0003, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 4.267892317793828, |
|
"grad_norm": 0.017696760594844818, |
|
"learning_rate": 7.734310201824716e-06, |
|
"loss": 0.0004, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 4.300722258699935, |
|
"grad_norm": 0.051392361521720886, |
|
"learning_rate": 7.388719933646669e-06, |
|
"loss": 0.0004, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 4.333552199606041, |
|
"grad_norm": 0.023702796548604965, |
|
"learning_rate": 7.04312966546862e-06, |
|
"loss": 0.0005, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 4.3663821405121475, |
|
"grad_norm": 0.12985366582870483, |
|
"learning_rate": 6.6975393972905724e-06, |
|
"loss": 0.0007, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 4.399212081418254, |
|
"grad_norm": 0.005154111888259649, |
|
"learning_rate": 6.351949129112525e-06, |
|
"loss": 0.0004, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 4.43204202232436, |
|
"grad_norm": 0.060625988990068436, |
|
"learning_rate": 6.0063588609344764e-06, |
|
"loss": 0.0004, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 4.4648719632304665, |
|
"grad_norm": 0.014278898015618324, |
|
"learning_rate": 5.660768592756428e-06, |
|
"loss": 0.0005, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 4.497701904136573, |
|
"grad_norm": 0.03604033589363098, |
|
"learning_rate": 5.31517832457838e-06, |
|
"loss": 0.0003, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 4.530531845042679, |
|
"grad_norm": 0.11452529579401016, |
|
"learning_rate": 4.969588056400332e-06, |
|
"loss": 0.0006, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 4.563361785948786, |
|
"grad_norm": 0.030383341014385223, |
|
"learning_rate": 4.623997788222284e-06, |
|
"loss": 0.0016, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 4.596191726854892, |
|
"grad_norm": 0.054634325206279755, |
|
"learning_rate": 4.278407520044236e-06, |
|
"loss": 0.0004, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 4.629021667760998, |
|
"grad_norm": 0.05445707589387894, |
|
"learning_rate": 3.932817251866188e-06, |
|
"loss": 0.0018, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 4.661851608667105, |
|
"grad_norm": 0.10374827682971954, |
|
"learning_rate": 3.5872269836881396e-06, |
|
"loss": 0.0007, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 4.694681549573211, |
|
"grad_norm": 0.07730349153280258, |
|
"learning_rate": 3.2416367155100916e-06, |
|
"loss": 0.0005, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 4.727511490479317, |
|
"grad_norm": 0.06179986149072647, |
|
"learning_rate": 2.896046447332043e-06, |
|
"loss": 0.0004, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 4.760341431385424, |
|
"grad_norm": 0.015075119212269783, |
|
"learning_rate": 2.550456179153995e-06, |
|
"loss": 0.0011, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 4.79317137229153, |
|
"grad_norm": 0.0656493604183197, |
|
"learning_rate": 2.204865910975947e-06, |
|
"loss": 0.0004, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 4.8260013131976365, |
|
"grad_norm": 0.017132466658949852, |
|
"learning_rate": 1.8592756427978988e-06, |
|
"loss": 0.0004, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 4.858831254103743, |
|
"grad_norm": 0.02434193529188633, |
|
"learning_rate": 1.5136853746198508e-06, |
|
"loss": 0.0005, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 4.891661195009849, |
|
"grad_norm": 0.0716409906744957, |
|
"learning_rate": 1.1680951064418028e-06, |
|
"loss": 0.0004, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 4.9244911359159556, |
|
"grad_norm": 0.017711373046040535, |
|
"learning_rate": 8.225048382637546e-07, |
|
"loss": 0.0004, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 4.957321076822062, |
|
"grad_norm": 0.0037664847914129496, |
|
"learning_rate": 4.769145700857065e-07, |
|
"loss": 0.0004, |
|
"step": 7550 |
|
}, |
|
{ |
|
"epoch": 4.990151017728168, |
|
"grad_norm": 0.001125144655816257, |
|
"learning_rate": 1.3132430190765827e-07, |
|
"loss": 0.0006, |
|
"step": 7600 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"eval_accuracy": 0.9997948309913187, |
|
"eval_f1": 0.998085740410612, |
|
"eval_loss": 0.0007292991504073143, |
|
"eval_precision": 0.9979807015582144, |
|
"eval_recall": 0.9981908013763068, |
|
"eval_runtime": 76.3485, |
|
"eval_samples_per_second": 235.761, |
|
"eval_steps_per_second": 3.694, |
|
"step": 7615 |
|
} |
|
], |
|
"logging_steps": 50, |
|
"max_steps": 7615, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.27347239453568e+17, |
|
"train_batch_size": 64, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
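The object above is the Trainer state saved alongside checkpoint-7615; the best eval_f1 (~0.9981, recorded in "best_metric") was reached at the final step 7615. As a minimal sketch of how this log can be inspected (not part of the checkpoint file itself; the local path "trainer_state.json" and variable names are illustrative assumptions), the snippet below splits "log_history" into per-step training-loss entries and per-epoch evaluation entries and prints the eval metrics:

```python
import json

# Load the saved trainer state (path is an assumption; point it at the
# trainer_state.json inside the checkpoint directory).
with open("trainer_state.json", "r", encoding="utf-8") as f:
    state = json.load(f)

# Entries logged every 50 training steps carry a "loss" key;
# end-of-epoch evaluation entries carry "eval_*" keys instead.
train_log = [e for e in state["log_history"] if "loss" in e]
eval_log = [e for e in state["log_history"] if "eval_f1" in e]

print(f"best F1 {state['best_metric']:.6f} at {state['best_model_checkpoint']}")
for e in eval_log:
    print(f"epoch {e['epoch']:.0f}  step {e['step']:>5}  "
          f"eval_loss {e['eval_loss']:.4f}  eval_f1 {e['eval_f1']:.4f}")
```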
|
|