|
{ |
|
"best_metric": 0.8391068037456617, |
|
"best_model_checkpoint": "/home/ubuntu/utah/babylm-24/src/evaluation/results/finetune/DebertaV2-Base-10M_babylm-A/qqp/checkpoint-227405", |
|
"epoch": 5.0, |
|
"eval_steps": 500, |
|
"global_step": 227405, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.01099360172379675, |
|
"grad_norm": 4.236701011657715, |
|
"learning_rate": 2.993403838965722e-05, |
|
"loss": 0.6086, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.0219872034475935, |
|
"grad_norm": 4.105135440826416, |
|
"learning_rate": 2.986807677931444e-05, |
|
"loss": 0.5618, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.03298080517139025, |
|
"grad_norm": 5.613928318023682, |
|
"learning_rate": 2.980211516897166e-05, |
|
"loss": 0.5252, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.043974406895187, |
|
"grad_norm": 12.754159927368164, |
|
"learning_rate": 2.973615355862888e-05, |
|
"loss": 0.5182, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.05496800861898375, |
|
"grad_norm": 5.488389015197754, |
|
"learning_rate": 2.9670191948286098e-05, |
|
"loss": 0.505, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.0659616103427805, |
|
"grad_norm": 5.46230936050415, |
|
"learning_rate": 2.9604230337943316e-05, |
|
"loss": 0.5044, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.07695521206657725, |
|
"grad_norm": 13.249972343444824, |
|
"learning_rate": 2.9538268727600537e-05, |
|
"loss": 0.498, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.087948813790374, |
|
"grad_norm": 13.043343544006348, |
|
"learning_rate": 2.9472307117257757e-05, |
|
"loss": 0.479, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.09894241551417075, |
|
"grad_norm": 14.147295951843262, |
|
"learning_rate": 2.9406345506914978e-05, |
|
"loss": 0.4692, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.1099360172379675, |
|
"grad_norm": 8.739255905151367, |
|
"learning_rate": 2.9340383896572196e-05, |
|
"loss": 0.4703, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.12092961896176425, |
|
"grad_norm": 11.371148109436035, |
|
"learning_rate": 2.9274422286229417e-05, |
|
"loss": 0.4557, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.131923220685561, |
|
"grad_norm": 8.44424057006836, |
|
"learning_rate": 2.9208460675886637e-05, |
|
"loss": 0.4635, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.14291682240935774, |
|
"grad_norm": 3.9929113388061523, |
|
"learning_rate": 2.914249906554385e-05, |
|
"loss": 0.4651, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.1539104241331545, |
|
"grad_norm": 3.4015634059906006, |
|
"learning_rate": 2.9076537455201072e-05, |
|
"loss": 0.4537, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.16490402585695124, |
|
"grad_norm": 4.32379674911499, |
|
"learning_rate": 2.9010575844858293e-05, |
|
"loss": 0.4445, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.175897627580748, |
|
"grad_norm": 5.210988521575928, |
|
"learning_rate": 2.8944614234515514e-05, |
|
"loss": 0.4326, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.18689122930454474, |
|
"grad_norm": 3.908278226852417, |
|
"learning_rate": 2.887865262417273e-05, |
|
"loss": 0.4454, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.1978848310283415, |
|
"grad_norm": 2.8256139755249023, |
|
"learning_rate": 2.8812691013829952e-05, |
|
"loss": 0.438, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.20887843275213824, |
|
"grad_norm": 7.012328624725342, |
|
"learning_rate": 2.8746729403487173e-05, |
|
"loss": 0.4333, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.219872034475935, |
|
"grad_norm": 5.030713081359863, |
|
"learning_rate": 2.8680767793144394e-05, |
|
"loss": 0.4588, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.23086563619973174, |
|
"grad_norm": 10.047335624694824, |
|
"learning_rate": 2.8614806182801608e-05, |
|
"loss": 0.4418, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.2418592379235285, |
|
"grad_norm": 5.562270164489746, |
|
"learning_rate": 2.854884457245883e-05, |
|
"loss": 0.4375, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.25285283964732524, |
|
"grad_norm": 14.433082580566406, |
|
"learning_rate": 2.848288296211605e-05, |
|
"loss": 0.4278, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.263846441371122, |
|
"grad_norm": 3.507910966873169, |
|
"learning_rate": 2.8416921351773267e-05, |
|
"loss": 0.4331, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.2748400430949188, |
|
"grad_norm": 13.184822082519531, |
|
"learning_rate": 2.8350959741430488e-05, |
|
"loss": 0.4325, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 0.2858336448187155, |
|
"grad_norm": 11.627528190612793, |
|
"learning_rate": 2.828499813108771e-05, |
|
"loss": 0.4283, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 0.29682724654251225, |
|
"grad_norm": 4.054474830627441, |
|
"learning_rate": 2.821903652074493e-05, |
|
"loss": 0.4361, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 0.307820848266309, |
|
"grad_norm": 7.193812847137451, |
|
"learning_rate": 2.8153074910402147e-05, |
|
"loss": 0.4251, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 0.3188144499901058, |
|
"grad_norm": 4.036972999572754, |
|
"learning_rate": 2.8087113300059365e-05, |
|
"loss": 0.4233, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 0.3298080517139025, |
|
"grad_norm": 6.451129913330078, |
|
"learning_rate": 2.8021151689716586e-05, |
|
"loss": 0.4407, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 0.34080165343769925, |
|
"grad_norm": 6.5472612380981445, |
|
"learning_rate": 2.7955190079373803e-05, |
|
"loss": 0.4158, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 0.351795255161496, |
|
"grad_norm": 16.589092254638672, |
|
"learning_rate": 2.7889228469031024e-05, |
|
"loss": 0.4261, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 0.3627888568852928, |
|
"grad_norm": 6.696326732635498, |
|
"learning_rate": 2.7823266858688245e-05, |
|
"loss": 0.4111, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 0.3737824586090895, |
|
"grad_norm": 4.396112442016602, |
|
"learning_rate": 2.7757305248345466e-05, |
|
"loss": 0.4236, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 0.38477606033288625, |
|
"grad_norm": 7.1260986328125, |
|
"learning_rate": 2.7691343638002683e-05, |
|
"loss": 0.413, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 0.395769662056683, |
|
"grad_norm": 8.553855895996094, |
|
"learning_rate": 2.7625382027659904e-05, |
|
"loss": 0.4105, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 0.4067632637804798, |
|
"grad_norm": 1.411335825920105, |
|
"learning_rate": 2.755942041731712e-05, |
|
"loss": 0.3977, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 0.4177568655042765, |
|
"grad_norm": 12.823638916015625, |
|
"learning_rate": 2.7493458806974342e-05, |
|
"loss": 0.4143, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 0.42875046722807325, |
|
"grad_norm": 11.823991775512695, |
|
"learning_rate": 2.742749719663156e-05, |
|
"loss": 0.4002, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 0.43974406895187, |
|
"grad_norm": 5.152065753936768, |
|
"learning_rate": 2.736153558628878e-05, |
|
"loss": 0.4001, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 0.4507376706756668, |
|
"grad_norm": 2.0797653198242188, |
|
"learning_rate": 2.7295573975946e-05, |
|
"loss": 0.4137, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 0.4617312723994635, |
|
"grad_norm": 5.874008655548096, |
|
"learning_rate": 2.722961236560322e-05, |
|
"loss": 0.4166, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 0.47272487412326025, |
|
"grad_norm": 20.658824920654297, |
|
"learning_rate": 2.716365075526044e-05, |
|
"loss": 0.4072, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 0.483718475847057, |
|
"grad_norm": 11.262660026550293, |
|
"learning_rate": 2.709768914491766e-05, |
|
"loss": 0.4048, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 0.4947120775708538, |
|
"grad_norm": 8.16556167602539, |
|
"learning_rate": 2.7031727534574878e-05, |
|
"loss": 0.4059, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 0.5057056792946505, |
|
"grad_norm": 12.176790237426758, |
|
"learning_rate": 2.6965765924232095e-05, |
|
"loss": 0.415, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 0.5166992810184473, |
|
"grad_norm": 8.630789756774902, |
|
"learning_rate": 2.6899804313889316e-05, |
|
"loss": 0.4059, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 0.527692882742244, |
|
"grad_norm": 11.014044761657715, |
|
"learning_rate": 2.6833842703546537e-05, |
|
"loss": 0.3804, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 0.5386864844660407, |
|
"grad_norm": 12.287202835083008, |
|
"learning_rate": 2.6767881093203758e-05, |
|
"loss": 0.4046, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 0.5496800861898375, |
|
"grad_norm": 6.118470668792725, |
|
"learning_rate": 2.6701919482860975e-05, |
|
"loss": 0.398, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 0.5606736879136343, |
|
"grad_norm": 17.233190536499023, |
|
"learning_rate": 2.6635957872518196e-05, |
|
"loss": 0.4013, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 0.571667289637431, |
|
"grad_norm": 15.902141571044922, |
|
"learning_rate": 2.6569996262175417e-05, |
|
"loss": 0.4158, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 0.5826608913612278, |
|
"grad_norm": 6.975952625274658, |
|
"learning_rate": 2.650403465183263e-05, |
|
"loss": 0.3939, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 0.5936544930850245, |
|
"grad_norm": 19.319835662841797, |
|
"learning_rate": 2.6438073041489852e-05, |
|
"loss": 0.4021, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 0.6046480948088213, |
|
"grad_norm": 3.9395010471343994, |
|
"learning_rate": 2.6372111431147073e-05, |
|
"loss": 0.4015, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 0.615641696532618, |
|
"grad_norm": 7.098001956939697, |
|
"learning_rate": 2.6306149820804294e-05, |
|
"loss": 0.3876, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 0.6266352982564147, |
|
"grad_norm": 3.967722177505493, |
|
"learning_rate": 2.624018821046151e-05, |
|
"loss": 0.3935, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 0.6376288999802115, |
|
"grad_norm": 5.257786273956299, |
|
"learning_rate": 2.6174226600118732e-05, |
|
"loss": 0.3959, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 0.6486225017040083, |
|
"grad_norm": 17.10645294189453, |
|
"learning_rate": 2.6108264989775953e-05, |
|
"loss": 0.3948, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 0.659616103427805, |
|
"grad_norm": 14.60950756072998, |
|
"learning_rate": 2.6042303379433174e-05, |
|
"loss": 0.4078, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 0.6706097051516018, |
|
"grad_norm": 6.0776166915893555, |
|
"learning_rate": 2.5976341769090388e-05, |
|
"loss": 0.3831, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 0.6816033068753985, |
|
"grad_norm": 1.830690622329712, |
|
"learning_rate": 2.591038015874761e-05, |
|
"loss": 0.3911, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 0.6925969085991953, |
|
"grad_norm": 16.717496871948242, |
|
"learning_rate": 2.584441854840483e-05, |
|
"loss": 0.3926, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 0.703590510322992, |
|
"grad_norm": 4.424517631530762, |
|
"learning_rate": 2.5778456938062047e-05, |
|
"loss": 0.3931, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 0.7145841120467887, |
|
"grad_norm": 6.995429992675781, |
|
"learning_rate": 2.5712495327719268e-05, |
|
"loss": 0.3912, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 0.7255777137705856, |
|
"grad_norm": 6.78953742980957, |
|
"learning_rate": 2.564653371737649e-05, |
|
"loss": 0.3858, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 0.7365713154943823, |
|
"grad_norm": 12.592966079711914, |
|
"learning_rate": 2.558057210703371e-05, |
|
"loss": 0.3822, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 0.747564917218179, |
|
"grad_norm": 1.8982641696929932, |
|
"learning_rate": 2.5514610496690927e-05, |
|
"loss": 0.3964, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 0.7585585189419758, |
|
"grad_norm": 3.2166171073913574, |
|
"learning_rate": 2.5448648886348144e-05, |
|
"loss": 0.3902, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 0.7695521206657725, |
|
"grad_norm": 10.258162498474121, |
|
"learning_rate": 2.5382687276005365e-05, |
|
"loss": 0.3691, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 0.7805457223895693, |
|
"grad_norm": 2.9836630821228027, |
|
"learning_rate": 2.5316725665662583e-05, |
|
"loss": 0.3937, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 0.791539324113366, |
|
"grad_norm": 10.085326194763184, |
|
"learning_rate": 2.5250764055319804e-05, |
|
"loss": 0.3897, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 0.8025329258371627, |
|
"grad_norm": 8.519903182983398, |
|
"learning_rate": 2.5184802444977024e-05, |
|
"loss": 0.361, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 0.8135265275609596, |
|
"grad_norm": 4.746450901031494, |
|
"learning_rate": 2.5118840834634245e-05, |
|
"loss": 0.3845, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 0.8245201292847563, |
|
"grad_norm": 13.058253288269043, |
|
"learning_rate": 2.5052879224291463e-05, |
|
"loss": 0.3871, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 0.835513731008553, |
|
"grad_norm": 2.8094441890716553, |
|
"learning_rate": 2.4986917613948684e-05, |
|
"loss": 0.4012, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 0.8465073327323498, |
|
"grad_norm": 4.1435770988464355, |
|
"learning_rate": 2.49209560036059e-05, |
|
"loss": 0.3802, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 0.8575009344561465, |
|
"grad_norm": 12.436211585998535, |
|
"learning_rate": 2.4854994393263122e-05, |
|
"loss": 0.3705, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 0.8684945361799433, |
|
"grad_norm": 16.29452133178711, |
|
"learning_rate": 2.478903278292034e-05, |
|
"loss": 0.3921, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 0.87948813790374, |
|
"grad_norm": 6.898037910461426, |
|
"learning_rate": 2.472307117257756e-05, |
|
"loss": 0.3925, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 0.8904817396275367, |
|
"grad_norm": 6.402541160583496, |
|
"learning_rate": 2.465710956223478e-05, |
|
"loss": 0.3771, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 0.9014753413513336, |
|
"grad_norm": 3.24283504486084, |
|
"learning_rate": 2.4591147951892e-05, |
|
"loss": 0.3698, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 0.9124689430751303, |
|
"grad_norm": 7.773282527923584, |
|
"learning_rate": 2.452518634154922e-05, |
|
"loss": 0.3823, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 0.923462544798927, |
|
"grad_norm": 4.645416736602783, |
|
"learning_rate": 2.445922473120644e-05, |
|
"loss": 0.3792, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 0.9344561465227238, |
|
"grad_norm": 8.859955787658691, |
|
"learning_rate": 2.4393263120863658e-05, |
|
"loss": 0.3988, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 0.9454497482465205, |
|
"grad_norm": 5.384950160980225, |
|
"learning_rate": 2.4327301510520875e-05, |
|
"loss": 0.367, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 0.9564433499703173, |
|
"grad_norm": 21.994779586791992, |
|
"learning_rate": 2.4261339900178096e-05, |
|
"loss": 0.3768, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 0.967436951694114, |
|
"grad_norm": 9.221137046813965, |
|
"learning_rate": 2.4195378289835317e-05, |
|
"loss": 0.3767, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 0.9784305534179107, |
|
"grad_norm": 1.9626529216766357, |
|
"learning_rate": 2.4129416679492538e-05, |
|
"loss": 0.3626, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 0.9894241551417076, |
|
"grad_norm": 19.04450798034668, |
|
"learning_rate": 2.4063455069149755e-05, |
|
"loss": 0.3754, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"eval_accuracy": 0.8456591367721558, |
|
"eval_f1": 0.7902943944078505, |
|
"eval_loss": 0.37383726239204407, |
|
"eval_mcc": 0.6683280718139261, |
|
"eval_runtime": 29.7825, |
|
"eval_samples_per_second": 678.754, |
|
"eval_steps_per_second": 84.848, |
|
"step": 45481 |
|
}, |
|
{ |
|
"epoch": 1.0004177568655044, |
|
"grad_norm": 14.85083293914795, |
|
"learning_rate": 2.3997493458806976e-05, |
|
"loss": 0.3783, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 1.011411358589301, |
|
"grad_norm": 7.004974365234375, |
|
"learning_rate": 2.3931531848464197e-05, |
|
"loss": 0.3307, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 1.0224049603130978, |
|
"grad_norm": 1.900647759437561, |
|
"learning_rate": 2.386557023812141e-05, |
|
"loss": 0.3454, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 1.0333985620368946, |
|
"grad_norm": 13.517301559448242, |
|
"learning_rate": 2.379960862777863e-05, |
|
"loss": 0.3404, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 1.0443921637606912, |
|
"grad_norm": 11.086533546447754, |
|
"learning_rate": 2.3733647017435853e-05, |
|
"loss": 0.344, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 1.055385765484488, |
|
"grad_norm": 1.5423752069473267, |
|
"learning_rate": 2.3667685407093073e-05, |
|
"loss": 0.348, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 1.0663793672082849, |
|
"grad_norm": 13.40974235534668, |
|
"learning_rate": 2.360172379675029e-05, |
|
"loss": 0.3353, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 1.0773729689320815, |
|
"grad_norm": 8.961548805236816, |
|
"learning_rate": 2.353576218640751e-05, |
|
"loss": 0.3442, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 1.0883665706558783, |
|
"grad_norm": 6.879663944244385, |
|
"learning_rate": 2.3469800576064733e-05, |
|
"loss": 0.331, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 1.099360172379675, |
|
"grad_norm": 10.819347381591797, |
|
"learning_rate": 2.3403838965721953e-05, |
|
"loss": 0.3431, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 1.1103537741034717, |
|
"grad_norm": 18.258974075317383, |
|
"learning_rate": 2.3337877355379167e-05, |
|
"loss": 0.3468, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 1.1213473758272685, |
|
"grad_norm": 11.075167655944824, |
|
"learning_rate": 2.3271915745036388e-05, |
|
"loss": 0.3313, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 1.1323409775510653, |
|
"grad_norm": 13.255118370056152, |
|
"learning_rate": 2.320595413469361e-05, |
|
"loss": 0.3379, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 1.1433345792748622, |
|
"grad_norm": 9.165648460388184, |
|
"learning_rate": 2.3139992524350827e-05, |
|
"loss": 0.3402, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 1.1543281809986587, |
|
"grad_norm": 20.563486099243164, |
|
"learning_rate": 2.3074030914008047e-05, |
|
"loss": 0.3429, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 1.1653217827224556, |
|
"grad_norm": 23.879840850830078, |
|
"learning_rate": 2.3008069303665268e-05, |
|
"loss": 0.3437, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 1.1763153844462524, |
|
"grad_norm": 16.95256996154785, |
|
"learning_rate": 2.294210769332249e-05, |
|
"loss": 0.3484, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 1.187308986170049, |
|
"grad_norm": 23.673189163208008, |
|
"learning_rate": 2.2876146082979707e-05, |
|
"loss": 0.3175, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 1.1983025878938458, |
|
"grad_norm": 12.443720817565918, |
|
"learning_rate": 2.2810184472636927e-05, |
|
"loss": 0.3471, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 1.2092961896176426, |
|
"grad_norm": 6.558742046356201, |
|
"learning_rate": 2.2744222862294145e-05, |
|
"loss": 0.3463, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 1.2202897913414392, |
|
"grad_norm": 3.0714826583862305, |
|
"learning_rate": 2.2678261251951362e-05, |
|
"loss": 0.3352, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 1.231283393065236, |
|
"grad_norm": 6.919187068939209, |
|
"learning_rate": 2.2612299641608583e-05, |
|
"loss": 0.335, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 1.2422769947890329, |
|
"grad_norm": 8.951086044311523, |
|
"learning_rate": 2.2546338031265804e-05, |
|
"loss": 0.3437, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 1.2532705965128295, |
|
"grad_norm": 81.6339111328125, |
|
"learning_rate": 2.2480376420923025e-05, |
|
"loss": 0.318, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 1.2642641982366263, |
|
"grad_norm": 10.197173118591309, |
|
"learning_rate": 2.2414414810580242e-05, |
|
"loss": 0.3478, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 1.275257799960423, |
|
"grad_norm": 3.3102078437805176, |
|
"learning_rate": 2.2348453200237463e-05, |
|
"loss": 0.3316, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 1.2862514016842197, |
|
"grad_norm": 7.871964454650879, |
|
"learning_rate": 2.2282491589894684e-05, |
|
"loss": 0.3329, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 1.2972450034080165, |
|
"grad_norm": 13.741714477539062, |
|
"learning_rate": 2.22165299795519e-05, |
|
"loss": 0.3285, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 1.3082386051318133, |
|
"grad_norm": 5.765045166015625, |
|
"learning_rate": 2.215056836920912e-05, |
|
"loss": 0.3363, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 1.31923220685561, |
|
"grad_norm": 21.365049362182617, |
|
"learning_rate": 2.208460675886634e-05, |
|
"loss": 0.3475, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 1.3302258085794068, |
|
"grad_norm": 16.869543075561523, |
|
"learning_rate": 2.201864514852356e-05, |
|
"loss": 0.3252, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 1.3412194103032036, |
|
"grad_norm": 31.089399337768555, |
|
"learning_rate": 2.1952683538180778e-05, |
|
"loss": 0.3344, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 1.3522130120270002, |
|
"grad_norm": 11.333529472351074, |
|
"learning_rate": 2.1886721927838e-05, |
|
"loss": 0.3641, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 1.363206613750797, |
|
"grad_norm": 9.713915824890137, |
|
"learning_rate": 2.182076031749522e-05, |
|
"loss": 0.3415, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 1.3742002154745938, |
|
"grad_norm": 8.068568229675293, |
|
"learning_rate": 2.175479870715244e-05, |
|
"loss": 0.3591, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 1.3851938171983904, |
|
"grad_norm": 19.801572799682617, |
|
"learning_rate": 2.1688837096809655e-05, |
|
"loss": 0.335, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 1.3961874189221872, |
|
"grad_norm": 28.160655975341797, |
|
"learning_rate": 2.1622875486466876e-05, |
|
"loss": 0.3374, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 1.407181020645984, |
|
"grad_norm": 2.872919797897339, |
|
"learning_rate": 2.1556913876124096e-05, |
|
"loss": 0.3403, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 1.4181746223697809, |
|
"grad_norm": 2.8728220462799072, |
|
"learning_rate": 2.1490952265781317e-05, |
|
"loss": 0.3384, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 1.4291682240935775, |
|
"grad_norm": 5.093236923217773, |
|
"learning_rate": 2.1424990655438535e-05, |
|
"loss": 0.3305, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 1.4401618258173743, |
|
"grad_norm": 17.081308364868164, |
|
"learning_rate": 2.1359029045095756e-05, |
|
"loss": 0.3589, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 1.451155427541171, |
|
"grad_norm": 2.3248064517974854, |
|
"learning_rate": 2.1293067434752976e-05, |
|
"loss": 0.3313, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 1.462149029264968, |
|
"grad_norm": 24.70163917541504, |
|
"learning_rate": 2.1227105824410194e-05, |
|
"loss": 0.3338, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 1.4731426309887645, |
|
"grad_norm": 19.80680274963379, |
|
"learning_rate": 2.116114421406741e-05, |
|
"loss": 0.3487, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 1.4841362327125613, |
|
"grad_norm": 2.365659713745117, |
|
"learning_rate": 2.1095182603724632e-05, |
|
"loss": 0.3479, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 1.4951298344363582, |
|
"grad_norm": 2.9082655906677246, |
|
"learning_rate": 2.1029220993381853e-05, |
|
"loss": 0.3427, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 1.5061234361601548, |
|
"grad_norm": 1.5247036218643188, |
|
"learning_rate": 2.096325938303907e-05, |
|
"loss": 0.321, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 1.5171170378839516, |
|
"grad_norm": 13.082464218139648, |
|
"learning_rate": 2.089729777269629e-05, |
|
"loss": 0.347, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 1.5281106396077484, |
|
"grad_norm": 32.83438491821289, |
|
"learning_rate": 2.0831336162353512e-05, |
|
"loss": 0.3382, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 1.539104241331545, |
|
"grad_norm": 19.514705657958984, |
|
"learning_rate": 2.0765374552010733e-05, |
|
"loss": 0.334, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 1.5500978430553418, |
|
"grad_norm": 11.129077911376953, |
|
"learning_rate": 2.069941294166795e-05, |
|
"loss": 0.3376, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 1.5610914447791386, |
|
"grad_norm": 14.449658393859863, |
|
"learning_rate": 2.0633451331325168e-05, |
|
"loss": 0.3297, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 1.5720850465029352, |
|
"grad_norm": 4.129580974578857, |
|
"learning_rate": 2.056748972098239e-05, |
|
"loss": 0.3405, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 1.583078648226732, |
|
"grad_norm": 4.104194164276123, |
|
"learning_rate": 2.0501528110639606e-05, |
|
"loss": 0.3327, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 1.5940722499505289, |
|
"grad_norm": 12.376803398132324, |
|
"learning_rate": 2.0435566500296827e-05, |
|
"loss": 0.3178, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 1.6050658516743255, |
|
"grad_norm": 18.45488739013672, |
|
"learning_rate": 2.0369604889954048e-05, |
|
"loss": 0.3457, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 1.6160594533981223, |
|
"grad_norm": 6.8812174797058105, |
|
"learning_rate": 2.030364327961127e-05, |
|
"loss": 0.3452, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 1.6270530551219191, |
|
"grad_norm": 3.4659981727600098, |
|
"learning_rate": 2.0237681669268486e-05, |
|
"loss": 0.3407, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 1.6380466568457157, |
|
"grad_norm": 21.697237014770508, |
|
"learning_rate": 2.0171720058925707e-05, |
|
"loss": 0.3493, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 1.6490402585695125, |
|
"grad_norm": 20.997262954711914, |
|
"learning_rate": 2.0105758448582925e-05, |
|
"loss": 0.3453, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 1.6600338602933093, |
|
"grad_norm": 8.582404136657715, |
|
"learning_rate": 2.0039796838240142e-05, |
|
"loss": 0.344, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 1.671027462017106, |
|
"grad_norm": 15.028887748718262, |
|
"learning_rate": 1.9973835227897363e-05, |
|
"loss": 0.3428, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 1.6820210637409028, |
|
"grad_norm": 15.200948715209961, |
|
"learning_rate": 1.9907873617554584e-05, |
|
"loss": 0.3531, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 1.6930146654646996, |
|
"grad_norm": 12.243021011352539, |
|
"learning_rate": 1.9841912007211805e-05, |
|
"loss": 0.3288, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 1.7040082671884962, |
|
"grad_norm": 0.7417749166488647, |
|
"learning_rate": 1.9775950396869022e-05, |
|
"loss": 0.3305, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 1.715001868912293, |
|
"grad_norm": 12.099386215209961, |
|
"learning_rate": 1.9709988786526243e-05, |
|
"loss": 0.3521, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 1.7259954706360898, |
|
"grad_norm": 10.566434860229492, |
|
"learning_rate": 1.9644027176183464e-05, |
|
"loss": 0.3466, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 1.7369890723598864, |
|
"grad_norm": 1.6488581895828247, |
|
"learning_rate": 1.957806556584068e-05, |
|
"loss": 0.3453, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 1.7479826740836832, |
|
"grad_norm": 15.446043968200684, |
|
"learning_rate": 1.95121039554979e-05, |
|
"loss": 0.3547, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 1.75897627580748, |
|
"grad_norm": 3.6907153129577637, |
|
"learning_rate": 1.944614234515512e-05, |
|
"loss": 0.3346, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 1.7699698775312767, |
|
"grad_norm": 13.95593547821045, |
|
"learning_rate": 1.938018073481234e-05, |
|
"loss": 0.3325, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 1.7809634792550737, |
|
"grad_norm": 7.613198757171631, |
|
"learning_rate": 1.9314219124469558e-05, |
|
"loss": 0.3302, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 1.7919570809788703, |
|
"grad_norm": 17.56180191040039, |
|
"learning_rate": 1.924825751412678e-05, |
|
"loss": 0.3303, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 1.802950682702667, |
|
"grad_norm": 30.033525466918945, |
|
"learning_rate": 1.9182295903784e-05, |
|
"loss": 0.327, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 1.813944284426464, |
|
"grad_norm": 2.658094644546509, |
|
"learning_rate": 1.911633429344122e-05, |
|
"loss": 0.3462, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 1.8249378861502605, |
|
"grad_norm": 8.311567306518555, |
|
"learning_rate": 1.9050372683098434e-05, |
|
"loss": 0.3347, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 1.8359314878740571, |
|
"grad_norm": 17.150461196899414, |
|
"learning_rate": 1.8984411072755655e-05, |
|
"loss": 0.3282, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 1.8469250895978542, |
|
"grad_norm": 10.157220840454102, |
|
"learning_rate": 1.8918449462412876e-05, |
|
"loss": 0.3193, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 1.8579186913216508, |
|
"grad_norm": 4.80257511138916, |
|
"learning_rate": 1.8852487852070097e-05, |
|
"loss": 0.3541, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 1.8689122930454476, |
|
"grad_norm": 3.8798446655273438, |
|
"learning_rate": 1.8786526241727314e-05, |
|
"loss": 0.3483, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 1.8799058947692444, |
|
"grad_norm": 8.881115913391113, |
|
"learning_rate": 1.8720564631384535e-05, |
|
"loss": 0.3302, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 1.890899496493041, |
|
"grad_norm": 0.945717453956604, |
|
"learning_rate": 1.8654603021041756e-05, |
|
"loss": 0.336, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 1.9018930982168378, |
|
"grad_norm": 5.144163608551025, |
|
"learning_rate": 1.8588641410698974e-05, |
|
"loss": 0.3148, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 1.9128866999406346, |
|
"grad_norm": 1.5305918455123901, |
|
"learning_rate": 1.852267980035619e-05, |
|
"loss": 0.3395, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 1.9238803016644312, |
|
"grad_norm": 15.06664752960205, |
|
"learning_rate": 1.8456718190013412e-05, |
|
"loss": 0.341, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 1.934873903388228, |
|
"grad_norm": 33.20983123779297, |
|
"learning_rate": 1.8390756579670633e-05, |
|
"loss": 0.3619, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 1.9458675051120249, |
|
"grad_norm": 11.427024841308594, |
|
"learning_rate": 1.832479496932785e-05, |
|
"loss": 0.3474, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 1.9568611068358215, |
|
"grad_norm": 23.793506622314453, |
|
"learning_rate": 1.825883335898507e-05, |
|
"loss": 0.321, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 1.9678547085596183, |
|
"grad_norm": 3.9075679779052734, |
|
"learning_rate": 1.8192871748642292e-05, |
|
"loss": 0.33, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 1.9788483102834151, |
|
"grad_norm": 1.7106132507324219, |
|
"learning_rate": 1.8126910138299513e-05, |
|
"loss": 0.3562, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 1.9898419120072117, |
|
"grad_norm": 1.6460707187652588, |
|
"learning_rate": 1.806094852795673e-05, |
|
"loss": 0.3378, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"eval_accuracy": 0.8638139963150024, |
|
"eval_f1": 0.8192264758027448, |
|
"eval_loss": 0.3989393711090088, |
|
"eval_mcc": 0.7101352338901957, |
|
"eval_runtime": 29.2332, |
|
"eval_samples_per_second": 691.508, |
|
"eval_steps_per_second": 86.443, |
|
"step": 90962 |
|
}, |
|
{ |
|
"epoch": 2.0008355137310088, |
|
"grad_norm": 0.9355267882347107, |
|
"learning_rate": 1.7994986917613948e-05, |
|
"loss": 0.3212, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 2.0118291154548054, |
|
"grad_norm": 11.889479637145996, |
|
"learning_rate": 1.792902530727117e-05, |
|
"loss": 0.3003, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 2.022822717178602, |
|
"grad_norm": 5.802761077880859, |
|
"learning_rate": 1.7863063696928386e-05, |
|
"loss": 0.2923, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 2.033816318902399, |
|
"grad_norm": 7.432724475860596, |
|
"learning_rate": 1.7797102086585607e-05, |
|
"loss": 0.293, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 2.0448099206261956, |
|
"grad_norm": 4.792222499847412, |
|
"learning_rate": 1.7731140476242828e-05, |
|
"loss": 0.3043, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 2.055803522349992, |
|
"grad_norm": 8.417468070983887, |
|
"learning_rate": 1.766517886590005e-05, |
|
"loss": 0.2916, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 2.0667971240737892, |
|
"grad_norm": 1.0828003883361816, |
|
"learning_rate": 1.7599217255557266e-05, |
|
"loss": 0.3112, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 2.077790725797586, |
|
"grad_norm": 3.0800647735595703, |
|
"learning_rate": 1.7533255645214487e-05, |
|
"loss": 0.2804, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 2.0887843275213824, |
|
"grad_norm": 3.254809617996216, |
|
"learning_rate": 1.7467294034871704e-05, |
|
"loss": 0.2909, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 2.0997779292451795, |
|
"grad_norm": 2.6388871669769287, |
|
"learning_rate": 1.7401332424528925e-05, |
|
"loss": 0.305, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 2.110771530968976, |
|
"grad_norm": 16.700735092163086, |
|
"learning_rate": 1.7335370814186143e-05, |
|
"loss": 0.2901, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 2.1217651326927727, |
|
"grad_norm": 17.04293441772461, |
|
"learning_rate": 1.7269409203843363e-05, |
|
"loss": 0.3217, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 2.1327587344165697, |
|
"grad_norm": 1.1329630613327026, |
|
"learning_rate": 1.7203447593500584e-05, |
|
"loss": 0.2898, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 2.1437523361403663, |
|
"grad_norm": 36.23415756225586, |
|
"learning_rate": 1.7137485983157802e-05, |
|
"loss": 0.2864, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 2.154745937864163, |
|
"grad_norm": 106.94963836669922, |
|
"learning_rate": 1.7071524372815023e-05, |
|
"loss": 0.3083, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 2.16573953958796, |
|
"grad_norm": 0.37686920166015625, |
|
"learning_rate": 1.7005562762472243e-05, |
|
"loss": 0.3238, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 2.1767331413117565, |
|
"grad_norm": 36.30667495727539, |
|
"learning_rate": 1.693960115212946e-05, |
|
"loss": 0.2917, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 2.187726743035553, |
|
"grad_norm": 35.45988082885742, |
|
"learning_rate": 1.687363954178668e-05, |
|
"loss": 0.3009, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 2.19872034475935, |
|
"grad_norm": 0.25279441475868225, |
|
"learning_rate": 1.68076779314439e-05, |
|
"loss": 0.3158, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 2.209713946483147, |
|
"grad_norm": 10.676984786987305, |
|
"learning_rate": 1.674171632110112e-05, |
|
"loss": 0.3005, |
|
"step": 100500 |
|
}, |
|
{ |
|
"epoch": 2.2207075482069434, |
|
"grad_norm": 19.04880714416504, |
|
"learning_rate": 1.6675754710758337e-05, |
|
"loss": 0.3018, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 2.2317011499307404, |
|
"grad_norm": 35.431583404541016, |
|
"learning_rate": 1.660979310041556e-05, |
|
"loss": 0.3135, |
|
"step": 101500 |
|
}, |
|
{ |
|
"epoch": 2.242694751654537, |
|
"grad_norm": 0.3378468155860901, |
|
"learning_rate": 1.654383149007278e-05, |
|
"loss": 0.2779, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 2.2536883533783336, |
|
"grad_norm": 46.81476974487305, |
|
"learning_rate": 1.647786987973e-05, |
|
"loss": 0.279, |
|
"step": 102500 |
|
}, |
|
{ |
|
"epoch": 2.2646819551021307, |
|
"grad_norm": 36.49277877807617, |
|
"learning_rate": 1.6411908269387214e-05, |
|
"loss": 0.3184, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 2.2756755568259273, |
|
"grad_norm": 12.877152442932129, |
|
"learning_rate": 1.6345946659044435e-05, |
|
"loss": 0.3024, |
|
"step": 103500 |
|
}, |
|
{ |
|
"epoch": 2.2866691585497243, |
|
"grad_norm": 4.798713684082031, |
|
"learning_rate": 1.6279985048701656e-05, |
|
"loss": 0.3014, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 2.297662760273521, |
|
"grad_norm": 6.63606071472168, |
|
"learning_rate": 1.6214023438358877e-05, |
|
"loss": 0.3214, |
|
"step": 104500 |
|
}, |
|
{ |
|
"epoch": 2.3086563619973175, |
|
"grad_norm": 13.403897285461426, |
|
"learning_rate": 1.6148061828016094e-05, |
|
"loss": 0.2943, |
|
"step": 105000 |
|
}, |
|
{ |
|
"epoch": 2.3196499637211145, |
|
"grad_norm": 33.9350471496582, |
|
"learning_rate": 1.6082100217673315e-05, |
|
"loss": 0.302, |
|
"step": 105500 |
|
}, |
|
{ |
|
"epoch": 2.330643565444911, |
|
"grad_norm": 3.330829620361328, |
|
"learning_rate": 1.6016138607330536e-05, |
|
"loss": 0.3087, |
|
"step": 106000 |
|
}, |
|
{ |
|
"epoch": 2.3416371671687077, |
|
"grad_norm": 1.2686516046524048, |
|
"learning_rate": 1.5950176996987753e-05, |
|
"loss": 0.3007, |
|
"step": 106500 |
|
}, |
|
{ |
|
"epoch": 2.3526307688925048, |
|
"grad_norm": 20.976926803588867, |
|
"learning_rate": 1.5884215386644974e-05, |
|
"loss": 0.3094, |
|
"step": 107000 |
|
}, |
|
{ |
|
"epoch": 2.3636243706163014, |
|
"grad_norm": 0.7142143249511719, |
|
"learning_rate": 1.581825377630219e-05, |
|
"loss": 0.3169, |
|
"step": 107500 |
|
}, |
|
{ |
|
"epoch": 2.374617972340098, |
|
"grad_norm": 6.738494873046875, |
|
"learning_rate": 1.5752292165959412e-05, |
|
"loss": 0.3101, |
|
"step": 108000 |
|
}, |
|
{ |
|
"epoch": 2.385611574063895, |
|
"grad_norm": 0.8053629398345947, |
|
"learning_rate": 1.568633055561663e-05, |
|
"loss": 0.3208, |
|
"step": 108500 |
|
}, |
|
{ |
|
"epoch": 2.3966051757876916, |
|
"grad_norm": 0.35285481810569763, |
|
"learning_rate": 1.562036894527385e-05, |
|
"loss": 0.2903, |
|
"step": 109000 |
|
}, |
|
{ |
|
"epoch": 2.407598777511488, |
|
"grad_norm": 0.9598795771598816, |
|
"learning_rate": 1.555440733493107e-05, |
|
"loss": 0.3189, |
|
"step": 109500 |
|
}, |
|
{ |
|
"epoch": 2.4185923792352853, |
|
"grad_norm": 8.283425331115723, |
|
"learning_rate": 1.5488445724588292e-05, |
|
"loss": 0.2922, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 2.429585980959082, |
|
"grad_norm": 2.2365481853485107, |
|
"learning_rate": 1.542248411424551e-05, |
|
"loss": 0.2865, |
|
"step": 110500 |
|
}, |
|
{ |
|
"epoch": 2.4405795826828784, |
|
"grad_norm": 22.584705352783203, |
|
"learning_rate": 1.535652250390273e-05, |
|
"loss": 0.2883, |
|
"step": 111000 |
|
}, |
|
{ |
|
"epoch": 2.4515731844066755, |
|
"grad_norm": 1.3138020038604736, |
|
"learning_rate": 1.5290560893559948e-05, |
|
"loss": 0.3233, |
|
"step": 111500 |
|
}, |
|
{ |
|
"epoch": 2.462566786130472, |
|
"grad_norm": 17.076557159423828, |
|
"learning_rate": 1.5224599283217167e-05, |
|
"loss": 0.3017, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 2.4735603878542687, |
|
"grad_norm": 139.9231719970703, |
|
"learning_rate": 1.5158637672874386e-05, |
|
"loss": 0.3033, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 2.4845539895780657, |
|
"grad_norm": 8.334077835083008, |
|
"learning_rate": 1.5092676062531607e-05, |
|
"loss": 0.2925, |
|
"step": 113000 |
|
}, |
|
{ |
|
"epoch": 2.4955475913018623, |
|
"grad_norm": 0.4488193094730377, |
|
"learning_rate": 1.5026714452188828e-05, |
|
"loss": 0.314, |
|
"step": 113500 |
|
}, |
|
{ |
|
"epoch": 2.506541193025659, |
|
"grad_norm": 18.986644744873047, |
|
"learning_rate": 1.4960752841846047e-05, |
|
"loss": 0.3011, |
|
"step": 114000 |
|
}, |
|
{ |
|
"epoch": 2.517534794749456, |
|
"grad_norm": 0.16863927245140076, |
|
"learning_rate": 1.4894791231503265e-05, |
|
"loss": 0.2845, |
|
"step": 114500 |
|
}, |
|
{ |
|
"epoch": 2.5285283964732526, |
|
"grad_norm": 19.12157440185547, |
|
"learning_rate": 1.4828829621160486e-05, |
|
"loss": 0.3092, |
|
"step": 115000 |
|
}, |
|
{ |
|
"epoch": 2.539521998197049, |
|
"grad_norm": 6.872998237609863, |
|
"learning_rate": 1.4762868010817705e-05, |
|
"loss": 0.3072, |
|
"step": 115500 |
|
}, |
|
{ |
|
"epoch": 2.550515599920846, |
|
"grad_norm": 0.4193851947784424, |
|
"learning_rate": 1.4696906400474924e-05, |
|
"loss": 0.3004, |
|
"step": 116000 |
|
}, |
|
{ |
|
"epoch": 2.561509201644643, |
|
"grad_norm": 0.6917738318443298, |
|
"learning_rate": 1.4630944790132143e-05, |
|
"loss": 0.2967, |
|
"step": 116500 |
|
}, |
|
{ |
|
"epoch": 2.5725028033684394, |
|
"grad_norm": 10.825478553771973, |
|
"learning_rate": 1.4564983179789362e-05, |
|
"loss": 0.3086, |
|
"step": 117000 |
|
}, |
|
{ |
|
"epoch": 2.5834964050922364, |
|
"grad_norm": 68.07927703857422, |
|
"learning_rate": 1.4499021569446583e-05, |
|
"loss": 0.2964, |
|
"step": 117500 |
|
}, |
|
{ |
|
"epoch": 2.594490006816033, |
|
"grad_norm": 5.563518047332764, |
|
"learning_rate": 1.4433059959103802e-05, |
|
"loss": 0.3067, |
|
"step": 118000 |
|
}, |
|
{ |
|
"epoch": 2.6054836085398296, |
|
"grad_norm": 4.1622633934021, |
|
"learning_rate": 1.4367098348761021e-05, |
|
"loss": 0.3019, |
|
"step": 118500 |
|
}, |
|
{ |
|
"epoch": 2.6164772102636267, |
|
"grad_norm": 20.468860626220703, |
|
"learning_rate": 1.430113673841824e-05, |
|
"loss": 0.2857, |
|
"step": 119000 |
|
}, |
|
{ |
|
"epoch": 2.6274708119874233, |
|
"grad_norm": 19.43634605407715, |
|
"learning_rate": 1.4235175128075461e-05, |
|
"loss": 0.3079, |
|
"step": 119500 |
|
}, |
|
{ |
|
"epoch": 2.63846441371122, |
|
"grad_norm": 6.021149158477783, |
|
"learning_rate": 1.416921351773268e-05, |
|
"loss": 0.2901, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 2.649458015435017, |
|
"grad_norm": 8.589285850524902, |
|
"learning_rate": 1.41032519073899e-05, |
|
"loss": 0.3109, |
|
"step": 120500 |
|
}, |
|
{ |
|
"epoch": 2.6604516171588135, |
|
"grad_norm": 16.921823501586914, |
|
"learning_rate": 1.4037290297047119e-05, |
|
"loss": 0.3002, |
|
"step": 121000 |
|
}, |
|
{ |
|
"epoch": 2.67144521888261, |
|
"grad_norm": 16.486186981201172, |
|
"learning_rate": 1.3971328686704338e-05, |
|
"loss": 0.306, |
|
"step": 121500 |
|
}, |
|
{ |
|
"epoch": 2.682438820606407, |
|
"grad_norm": 8.290379524230957, |
|
"learning_rate": 1.3905367076361559e-05, |
|
"loss": 0.3005, |
|
"step": 122000 |
|
}, |
|
{ |
|
"epoch": 2.6934324223302037, |
|
"grad_norm": 0.8587543964385986, |
|
"learning_rate": 1.3839405466018776e-05, |
|
"loss": 0.29, |
|
"step": 122500 |
|
}, |
|
{ |
|
"epoch": 2.7044260240540003, |
|
"grad_norm": 45.68854904174805, |
|
"learning_rate": 1.3773443855675997e-05, |
|
"loss": 0.3037, |
|
"step": 123000 |
|
}, |
|
{ |
|
"epoch": 2.7154196257777974, |
|
"grad_norm": 13.316100120544434, |
|
"learning_rate": 1.3707482245333216e-05, |
|
"loss": 0.2717, |
|
"step": 123500 |
|
}, |
|
{ |
|
"epoch": 2.726413227501594, |
|
"grad_norm": 5.796350479125977, |
|
"learning_rate": 1.3641520634990437e-05, |
|
"loss": 0.3116, |
|
"step": 124000 |
|
}, |
|
{ |
|
"epoch": 2.7374068292253906, |
|
"grad_norm": 10.975761413574219, |
|
"learning_rate": 1.3575559024647655e-05, |
|
"loss": 0.3089, |
|
"step": 124500 |
|
}, |
|
{ |
|
"epoch": 2.7484004309491876, |
|
"grad_norm": 99.86316680908203, |
|
"learning_rate": 1.3509597414304875e-05, |
|
"loss": 0.3071, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 2.759394032672984, |
|
"grad_norm": 92.33716583251953, |
|
"learning_rate": 1.3443635803962095e-05, |
|
"loss": 0.2881, |
|
"step": 125500 |
|
}, |
|
{ |
|
"epoch": 2.770387634396781, |
|
"grad_norm": 20.75370979309082, |
|
"learning_rate": 1.3377674193619314e-05, |
|
"loss": 0.2922, |
|
"step": 126000 |
|
}, |
|
{ |
|
"epoch": 2.781381236120578, |
|
"grad_norm": 63.51997756958008, |
|
"learning_rate": 1.3311712583276533e-05, |
|
"loss": 0.3016, |
|
"step": 126500 |
|
}, |
|
{ |
|
"epoch": 2.7923748378443745, |
|
"grad_norm": 12.819772720336914, |
|
"learning_rate": 1.3245750972933752e-05, |
|
"loss": 0.3005, |
|
"step": 127000 |
|
}, |
|
{ |
|
"epoch": 2.803368439568171, |
|
"grad_norm": 14.167094230651855, |
|
"learning_rate": 1.3179789362590973e-05, |
|
"loss": 0.3099, |
|
"step": 127500 |
|
}, |
|
{ |
|
"epoch": 2.814362041291968, |
|
"grad_norm": 14.828591346740723, |
|
"learning_rate": 1.3113827752248192e-05, |
|
"loss": 0.2759, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 2.8253556430157647, |
|
"grad_norm": 9.91226577758789, |
|
"learning_rate": 1.3047866141905411e-05, |
|
"loss": 0.3025, |
|
"step": 128500 |
|
}, |
|
{ |
|
"epoch": 2.8363492447395617, |
|
"grad_norm": 38.544525146484375, |
|
"learning_rate": 1.298190453156263e-05, |
|
"loss": 0.3038, |
|
"step": 129000 |
|
}, |
|
{ |
|
"epoch": 2.8473428464633583, |
|
"grad_norm": 5.008056640625, |
|
"learning_rate": 1.2915942921219851e-05, |
|
"loss": 0.2947, |
|
"step": 129500 |
|
}, |
|
{ |
|
"epoch": 2.858336448187155, |
|
"grad_norm": 14.466870307922363, |
|
"learning_rate": 1.284998131087707e-05, |
|
"loss": 0.2989, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 2.869330049910952, |
|
"grad_norm": 0.3647148907184601, |
|
"learning_rate": 1.278401970053429e-05, |
|
"loss": 0.2864, |
|
"step": 130500 |
|
}, |
|
{ |
|
"epoch": 2.8803236516347486, |
|
"grad_norm": 0.18057258427143097, |
|
"learning_rate": 1.2718058090191509e-05, |
|
"loss": 0.2894, |
|
"step": 131000 |
|
}, |
|
{ |
|
"epoch": 2.891317253358545, |
|
"grad_norm": 0.5057438015937805, |
|
"learning_rate": 1.2652096479848728e-05, |
|
"loss": 0.302, |
|
"step": 131500 |
|
}, |
|
{ |
|
"epoch": 2.902310855082342, |
|
"grad_norm": 10.934133529663086, |
|
"learning_rate": 1.2586134869505949e-05, |
|
"loss": 0.3047, |
|
"step": 132000 |
|
}, |
|
{ |
|
"epoch": 2.913304456806139, |
|
"grad_norm": 2.3341269493103027, |
|
"learning_rate": 1.2520173259163166e-05, |
|
"loss": 0.3011, |
|
"step": 132500 |
|
}, |
|
{ |
|
"epoch": 2.924298058529936, |
|
"grad_norm": 0.3195688724517822, |
|
"learning_rate": 1.2454211648820387e-05, |
|
"loss": 0.2939, |
|
"step": 133000 |
|
}, |
|
{ |
|
"epoch": 2.9352916602537324, |
|
"grad_norm": 8.257743835449219, |
|
"learning_rate": 1.2388250038477606e-05, |
|
"loss": 0.3057, |
|
"step": 133500 |
|
}, |
|
{ |
|
"epoch": 2.946285261977529, |
|
"grad_norm": 20.577478408813477, |
|
"learning_rate": 1.2322288428134827e-05, |
|
"loss": 0.2966, |
|
"step": 134000 |
|
}, |
|
{ |
|
"epoch": 2.957278863701326, |
|
"grad_norm": 0.777562141418457, |
|
"learning_rate": 1.2256326817792044e-05, |
|
"loss": 0.3013, |
|
"step": 134500 |
|
}, |
|
{ |
|
"epoch": 2.9682724654251227, |
|
"grad_norm": 58.6212158203125, |
|
"learning_rate": 1.2190365207449265e-05, |
|
"loss": 0.2874, |
|
"step": 135000 |
|
}, |
|
{ |
|
"epoch": 2.9792660671489193, |
|
"grad_norm": 69.42217254638672, |
|
"learning_rate": 1.2124403597106484e-05, |
|
"loss": 0.293, |
|
"step": 135500 |
|
}, |
|
{ |
|
"epoch": 2.9902596688727163, |
|
"grad_norm": 4.408263683319092, |
|
"learning_rate": 1.2058441986763704e-05, |
|
"loss": 0.3005, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"eval_accuracy": 0.8718278408050537, |
|
"eval_f1": 0.8299757201916136, |
|
"eval_loss": 0.42042940855026245, |
|
"eval_mcc": 0.7272909835972381, |
|
"eval_runtime": 28.09, |
|
"eval_samples_per_second": 719.65, |
|
"eval_steps_per_second": 89.961, |
|
"step": 136443 |
|
}, |
|
{ |
|
"epoch": 3.001253270596513, |
|
"grad_norm": 6.014369010925293, |
|
"learning_rate": 1.1992480376420923e-05, |
|
"loss": 0.281, |
|
"step": 136500 |
|
}, |
|
{ |
|
"epoch": 3.0122468723203095, |
|
"grad_norm": 10.639359474182129, |
|
"learning_rate": 1.1926518766078142e-05, |
|
"loss": 0.2556, |
|
"step": 137000 |
|
}, |
|
{ |
|
"epoch": 3.023240474044106, |
|
"grad_norm": 3.0724806785583496, |
|
"learning_rate": 1.1860557155735363e-05, |
|
"loss": 0.2414, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 3.034234075767903, |
|
"grad_norm": 0.3316449522972107, |
|
"learning_rate": 1.1794595545392582e-05, |
|
"loss": 0.2539, |
|
"step": 138000 |
|
}, |
|
{ |
|
"epoch": 3.0452276774916998, |
|
"grad_norm": 23.327177047729492, |
|
"learning_rate": 1.1728633935049801e-05, |
|
"loss": 0.2374, |
|
"step": 138500 |
|
}, |
|
{ |
|
"epoch": 3.056221279215497, |
|
"grad_norm": 0.7128089666366577, |
|
"learning_rate": 1.166267232470702e-05, |
|
"loss": 0.2476, |
|
"step": 139000 |
|
}, |
|
{ |
|
"epoch": 3.0672148809392934, |
|
"grad_norm": 189.15638732910156, |
|
"learning_rate": 1.1596710714364241e-05, |
|
"loss": 0.2431, |
|
"step": 139500 |
|
}, |
|
{ |
|
"epoch": 3.07820848266309, |
|
"grad_norm": 17.80859375, |
|
"learning_rate": 1.153074910402146e-05, |
|
"loss": 0.225, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 3.089202084386887, |
|
"grad_norm": 0.14886409044265747, |
|
"learning_rate": 1.146478749367868e-05, |
|
"loss": 0.2495, |
|
"step": 140500 |
|
}, |
|
{ |
|
"epoch": 3.1001956861106836, |
|
"grad_norm": 0.5925188064575195, |
|
"learning_rate": 1.1398825883335899e-05, |
|
"loss": 0.2541, |
|
"step": 141000 |
|
}, |
|
{ |
|
"epoch": 3.1111892878344802, |
|
"grad_norm": 9.857983589172363, |
|
"learning_rate": 1.1332864272993118e-05, |
|
"loss": 0.2565, |
|
"step": 141500 |
|
}, |
|
{ |
|
"epoch": 3.1221828895582773, |
|
"grad_norm": 0.7951391339302063, |
|
"learning_rate": 1.1266902662650339e-05, |
|
"loss": 0.2561, |
|
"step": 142000 |
|
}, |
|
{ |
|
"epoch": 3.133176491282074, |
|
"grad_norm": 61.58017349243164, |
|
"learning_rate": 1.1200941052307556e-05, |
|
"loss": 0.2361, |
|
"step": 142500 |
|
}, |
|
{ |
|
"epoch": 3.1441700930058705, |
|
"grad_norm": 9.199590682983398, |
|
"learning_rate": 1.1134979441964777e-05, |
|
"loss": 0.2559, |
|
"step": 143000 |
|
}, |
|
{ |
|
"epoch": 3.1551636947296675, |
|
"grad_norm": 1.7396503686904907, |
|
"learning_rate": 1.1069017831621996e-05, |
|
"loss": 0.2543, |
|
"step": 143500 |
|
}, |
|
{ |
|
"epoch": 3.166157296453464, |
|
"grad_norm": 185.94760131835938, |
|
"learning_rate": 1.1003056221279217e-05, |
|
"loss": 0.2309, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 3.1771508981772607, |
|
"grad_norm": 42.58454132080078, |
|
"learning_rate": 1.0937094610936434e-05, |
|
"loss": 0.2634, |
|
"step": 144500 |
|
}, |
|
{ |
|
"epoch": 3.1881444999010577, |
|
"grad_norm": 0.19487299025058746, |
|
"learning_rate": 1.0871133000593655e-05, |
|
"loss": 0.2668, |
|
"step": 145000 |
|
}, |
|
{ |
|
"epoch": 3.1991381016248543, |
|
"grad_norm": 0.11774999648332596, |
|
"learning_rate": 1.0805171390250874e-05, |
|
"loss": 0.2531, |
|
"step": 145500 |
|
}, |
|
{ |
|
"epoch": 3.210131703348651, |
|
"grad_norm": 0.8709030747413635, |
|
"learning_rate": 1.0739209779908093e-05, |
|
"loss": 0.2579, |
|
"step": 146000 |
|
}, |
|
{ |
|
"epoch": 3.221125305072448, |
|
"grad_norm": 0.26620733737945557, |
|
"learning_rate": 1.0673248169565313e-05, |
|
"loss": 0.2456, |
|
"step": 146500 |
|
}, |
|
{ |
|
"epoch": 3.2321189067962446, |
|
"grad_norm": 30.161376953125, |
|
"learning_rate": 1.0607286559222532e-05, |
|
"loss": 0.2588, |
|
"step": 147000 |
|
}, |
|
{ |
|
"epoch": 3.243112508520041, |
|
"grad_norm": 14.72189998626709, |
|
"learning_rate": 1.0541324948879753e-05, |
|
"loss": 0.2538, |
|
"step": 147500 |
|
}, |
|
{ |
|
"epoch": 3.2541061102438382, |
|
"grad_norm": 22.82953453063965, |
|
"learning_rate": 1.0475363338536972e-05, |
|
"loss": 0.2398, |
|
"step": 148000 |
|
}, |
|
{ |
|
"epoch": 3.265099711967635, |
|
"grad_norm": 1.3340407609939575, |
|
"learning_rate": 1.0409401728194191e-05, |
|
"loss": 0.2619, |
|
"step": 148500 |
|
}, |
|
{ |
|
"epoch": 3.2760933136914314, |
|
"grad_norm": 4.700684070587158, |
|
"learning_rate": 1.034344011785141e-05, |
|
"loss": 0.2563, |
|
"step": 149000 |
|
}, |
|
{ |
|
"epoch": 3.2870869154152285, |
|
"grad_norm": 49.13290786743164, |
|
"learning_rate": 1.0277478507508631e-05, |
|
"loss": 0.2756, |
|
"step": 149500 |
|
}, |
|
{ |
|
"epoch": 3.298080517139025, |
|
"grad_norm": 0.5340966582298279, |
|
"learning_rate": 1.021151689716585e-05, |
|
"loss": 0.249, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 3.3090741188628217, |
|
"grad_norm": 156.9650115966797, |
|
"learning_rate": 1.014555528682307e-05, |
|
"loss": 0.237, |
|
"step": 150500 |
|
}, |
|
{ |
|
"epoch": 3.3200677205866187, |
|
"grad_norm": 0.09667583554983139, |
|
"learning_rate": 1.0079593676480288e-05, |
|
"loss": 0.2621, |
|
"step": 151000 |
|
}, |
|
{ |
|
"epoch": 3.3310613223104153, |
|
"grad_norm": 9.222663879394531, |
|
"learning_rate": 1.0013632066137508e-05, |
|
"loss": 0.2457, |
|
"step": 151500 |
|
}, |
|
{ |
|
"epoch": 3.342054924034212, |
|
"grad_norm": 14.612710952758789, |
|
"learning_rate": 9.947670455794728e-06, |
|
"loss": 0.2555, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 3.353048525758009, |
|
"grad_norm": 50.92832565307617, |
|
"learning_rate": 9.881708845451946e-06, |
|
"loss": 0.2394, |
|
"step": 152500 |
|
}, |
|
{ |
|
"epoch": 3.3640421274818055, |
|
"grad_norm": 1.5905165672302246, |
|
"learning_rate": 9.815747235109167e-06, |
|
"loss": 0.2408, |
|
"step": 153000 |
|
}, |
|
{ |
|
"epoch": 3.375035729205602, |
|
"grad_norm": 34.99452209472656, |
|
"learning_rate": 9.749785624766386e-06, |
|
"loss": 0.2351, |
|
"step": 153500 |
|
}, |
|
{ |
|
"epoch": 3.386029330929399, |
|
"grad_norm": 1.3218666315078735, |
|
"learning_rate": 9.683824014423607e-06, |
|
"loss": 0.2494, |
|
"step": 154000 |
|
}, |
|
{ |
|
"epoch": 3.3970229326531958, |
|
"grad_norm": 19.163127899169922, |
|
"learning_rate": 9.617862404080824e-06, |
|
"loss": 0.2716, |
|
"step": 154500 |
|
}, |
|
{ |
|
"epoch": 3.4080165343769924, |
|
"grad_norm": 0.3468831479549408, |
|
"learning_rate": 9.551900793738045e-06, |
|
"loss": 0.2617, |
|
"step": 155000 |
|
}, |
|
{ |
|
"epoch": 3.4190101361007894, |
|
"grad_norm": 19.626012802124023, |
|
"learning_rate": 9.485939183395264e-06, |
|
"loss": 0.2651, |
|
"step": 155500 |
|
}, |
|
{ |
|
"epoch": 3.430003737824586, |
|
"grad_norm": 17.755313873291016, |
|
"learning_rate": 9.419977573052483e-06, |
|
"loss": 0.2487, |
|
"step": 156000 |
|
}, |
|
{ |
|
"epoch": 3.4409973395483826, |
|
"grad_norm": 14.6954984664917, |
|
"learning_rate": 9.354015962709702e-06, |
|
"loss": 0.2579, |
|
"step": 156500 |
|
}, |
|
{ |
|
"epoch": 3.4519909412721796, |
|
"grad_norm": 24.834569931030273, |
|
"learning_rate": 9.288054352366922e-06, |
|
"loss": 0.2343, |
|
"step": 157000 |
|
}, |
|
{ |
|
"epoch": 3.4629845429959762, |
|
"grad_norm": 11.748075485229492, |
|
"learning_rate": 9.222092742024142e-06, |
|
"loss": 0.2493, |
|
"step": 157500 |
|
}, |
|
{ |
|
"epoch": 3.473978144719773, |
|
"grad_norm": 0.26250067353248596, |
|
"learning_rate": 9.156131131681362e-06, |
|
"loss": 0.2518, |
|
"step": 158000 |
|
}, |
|
{ |
|
"epoch": 3.48497174644357, |
|
"grad_norm": 0.31238773465156555, |
|
"learning_rate": 9.09016952133858e-06, |
|
"loss": 0.2563, |
|
"step": 158500 |
|
}, |
|
{ |
|
"epoch": 3.4959653481673665, |
|
"grad_norm": 0.38414067029953003, |
|
"learning_rate": 9.0242079109958e-06, |
|
"loss": 0.2605, |
|
"step": 159000 |
|
}, |
|
{ |
|
"epoch": 3.506958949891163, |
|
"grad_norm": 0.26335904002189636, |
|
"learning_rate": 8.95824630065302e-06, |
|
"loss": 0.25, |
|
"step": 159500 |
|
}, |
|
{ |
|
"epoch": 3.51795255161496, |
|
"grad_norm": 11.267284393310547, |
|
"learning_rate": 8.89228469031024e-06, |
|
"loss": 0.2446, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 3.5289461533387567, |
|
"grad_norm": 13.036714553833008, |
|
"learning_rate": 8.826323079967459e-06, |
|
"loss": 0.2341, |
|
"step": 160500 |
|
}, |
|
{ |
|
"epoch": 3.5399397550625533, |
|
"grad_norm": 9.3615140914917, |
|
"learning_rate": 8.760361469624678e-06, |
|
"loss": 0.2632, |
|
"step": 161000 |
|
}, |
|
{ |
|
"epoch": 3.5509333567863504, |
|
"grad_norm": 0.3549996018409729, |
|
"learning_rate": 8.694399859281897e-06, |
|
"loss": 0.2507, |
|
"step": 161500 |
|
}, |
|
{ |
|
"epoch": 3.561926958510147, |
|
"grad_norm": 0.46619582176208496, |
|
"learning_rate": 8.628438248939118e-06, |
|
"loss": 0.2543, |
|
"step": 162000 |
|
}, |
|
{ |
|
"epoch": 3.572920560233944, |
|
"grad_norm": 0.9738485217094421, |
|
"learning_rate": 8.562476638596337e-06, |
|
"loss": 0.2672, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 3.5839141619577406, |
|
"grad_norm": 12.682645797729492, |
|
"learning_rate": 8.496515028253557e-06, |
|
"loss": 0.2422, |
|
"step": 163000 |
|
}, |
|
{ |
|
"epoch": 3.594907763681537, |
|
"grad_norm": 0.33584246039390564, |
|
"learning_rate": 8.430553417910776e-06, |
|
"loss": 0.2597, |
|
"step": 163500 |
|
}, |
|
{ |
|
"epoch": 3.6059013654053342, |
|
"grad_norm": 8.06340217590332, |
|
"learning_rate": 8.364591807567997e-06, |
|
"loss": 0.271, |
|
"step": 164000 |
|
}, |
|
{ |
|
"epoch": 3.616894967129131, |
|
"grad_norm": 0.4343748688697815, |
|
"learning_rate": 8.298630197225216e-06, |
|
"loss": 0.2686, |
|
"step": 164500 |
|
}, |
|
{ |
|
"epoch": 3.6278885688529274, |
|
"grad_norm": 38.23839569091797, |
|
"learning_rate": 8.232668586882435e-06, |
|
"loss": 0.2605, |
|
"step": 165000 |
|
}, |
|
{ |
|
"epoch": 3.6388821705767245, |
|
"grad_norm": 20.085224151611328, |
|
"learning_rate": 8.166706976539654e-06, |
|
"loss": 0.2449, |
|
"step": 165500 |
|
}, |
|
{ |
|
"epoch": 3.649875772300521, |
|
"grad_norm": 0.3538534641265869, |
|
"learning_rate": 8.100745366196875e-06, |
|
"loss": 0.2436, |
|
"step": 166000 |
|
}, |
|
{ |
|
"epoch": 3.660869374024318, |
|
"grad_norm": 0.3242553174495697, |
|
"learning_rate": 8.034783755854094e-06, |
|
"loss": 0.2468, |
|
"step": 166500 |
|
}, |
|
{ |
|
"epoch": 3.6718629757481147, |
|
"grad_norm": 28.696617126464844, |
|
"learning_rate": 7.968822145511311e-06, |
|
"loss": 0.2621, |
|
"step": 167000 |
|
}, |
|
{ |
|
"epoch": 3.6828565774719113, |
|
"grad_norm": 0.26111406087875366, |
|
"learning_rate": 7.902860535168532e-06, |
|
"loss": 0.2519, |
|
"step": 167500 |
|
}, |
|
{ |
|
"epoch": 3.6938501791957083, |
|
"grad_norm": 0.24540553987026215, |
|
"learning_rate": 7.836898924825751e-06, |
|
"loss": 0.2515, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 3.704843780919505, |
|
"grad_norm": 0.4676073491573334, |
|
"learning_rate": 7.770937314482972e-06, |
|
"loss": 0.256, |
|
"step": 168500 |
|
}, |
|
{ |
|
"epoch": 3.7158373826433015, |
|
"grad_norm": 87.25594329833984, |
|
"learning_rate": 7.70497570414019e-06, |
|
"loss": 0.2354, |
|
"step": 169000 |
|
}, |
|
{ |
|
"epoch": 3.7268309843670986, |
|
"grad_norm": 1.1010403633117676, |
|
"learning_rate": 7.63901409379741e-06, |
|
"loss": 0.2394, |
|
"step": 169500 |
|
}, |
|
{ |
|
"epoch": 3.737824586090895, |
|
"grad_norm": 0.1542312502861023, |
|
"learning_rate": 7.57305248345463e-06, |
|
"loss": 0.2491, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 3.748818187814692, |
|
"grad_norm": 2.4090046882629395, |
|
"learning_rate": 7.50709087311185e-06, |
|
"loss": 0.2337, |
|
"step": 170500 |
|
}, |
|
{ |
|
"epoch": 3.759811789538489, |
|
"grad_norm": 6.501917362213135, |
|
"learning_rate": 7.441129262769069e-06, |
|
"loss": 0.2406, |
|
"step": 171000 |
|
}, |
|
{ |
|
"epoch": 3.7708053912622854, |
|
"grad_norm": 19.246479034423828, |
|
"learning_rate": 7.375167652426288e-06, |
|
"loss": 0.2547, |
|
"step": 171500 |
|
}, |
|
{ |
|
"epoch": 3.781798992986082, |
|
"grad_norm": 0.10231161117553711, |
|
"learning_rate": 7.309206042083508e-06, |
|
"loss": 0.2379, |
|
"step": 172000 |
|
}, |
|
{ |
|
"epoch": 3.792792594709879, |
|
"grad_norm": 10.832609176635742, |
|
"learning_rate": 7.243244431740727e-06, |
|
"loss": 0.2711, |
|
"step": 172500 |
|
}, |
|
{ |
|
"epoch": 3.8037861964336757, |
|
"grad_norm": 18.556346893310547, |
|
"learning_rate": 7.177282821397947e-06, |
|
"loss": 0.2265, |
|
"step": 173000 |
|
}, |
|
{ |
|
"epoch": 3.8147797981574723, |
|
"grad_norm": 1.0246055126190186, |
|
"learning_rate": 7.111321211055166e-06, |
|
"loss": 0.2334, |
|
"step": 173500 |
|
}, |
|
{ |
|
"epoch": 3.8257733998812693, |
|
"grad_norm": 1.1454087495803833, |
|
"learning_rate": 7.0453596007123855e-06, |
|
"loss": 0.2491, |
|
"step": 174000 |
|
}, |
|
{ |
|
"epoch": 3.836767001605066, |
|
"grad_norm": 11.247049331665039, |
|
"learning_rate": 6.979397990369605e-06, |
|
"loss": 0.2632, |
|
"step": 174500 |
|
}, |
|
{ |
|
"epoch": 3.8477606033288625, |
|
"grad_norm": 14.466601371765137, |
|
"learning_rate": 6.913436380026825e-06, |
|
"loss": 0.2639, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 3.8587542050526595, |
|
"grad_norm": 7.91213321685791, |
|
"learning_rate": 6.847474769684044e-06, |
|
"loss": 0.2527, |
|
"step": 175500 |
|
}, |
|
{ |
|
"epoch": 3.869747806776456, |
|
"grad_norm": 26.528411865234375, |
|
"learning_rate": 6.781513159341264e-06, |
|
"loss": 0.2438, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 3.8807414085002527, |
|
"grad_norm": 0.7833952903747559, |
|
"learning_rate": 6.715551548998483e-06, |
|
"loss": 0.2585, |
|
"step": 176500 |
|
}, |
|
{ |
|
"epoch": 3.8917350102240498, |
|
"grad_norm": 53.77830123901367, |
|
"learning_rate": 6.649589938655703e-06, |
|
"loss": 0.2615, |
|
"step": 177000 |
|
}, |
|
{ |
|
"epoch": 3.9027286119478464, |
|
"grad_norm": 0.6139953136444092, |
|
"learning_rate": 6.583628328312922e-06, |
|
"loss": 0.2644, |
|
"step": 177500 |
|
}, |
|
{ |
|
"epoch": 3.913722213671643, |
|
"grad_norm": 1.4486163854599, |
|
"learning_rate": 6.517666717970142e-06, |
|
"loss": 0.2302, |
|
"step": 178000 |
|
}, |
|
{ |
|
"epoch": 3.92471581539544, |
|
"grad_norm": 5.287415504455566, |
|
"learning_rate": 6.451705107627361e-06, |
|
"loss": 0.2727, |
|
"step": 178500 |
|
}, |
|
{ |
|
"epoch": 3.9357094171192366, |
|
"grad_norm": 26.611614227294922, |
|
"learning_rate": 6.3857434972845804e-06, |
|
"loss": 0.2514, |
|
"step": 179000 |
|
}, |
|
{ |
|
"epoch": 3.946703018843033, |
|
"grad_norm": 14.361977577209473, |
|
"learning_rate": 6.3197818869418e-06, |
|
"loss": 0.2476, |
|
"step": 179500 |
|
}, |
|
{ |
|
"epoch": 3.9576966205668302, |
|
"grad_norm": 1.3597434759140015, |
|
"learning_rate": 6.25382027659902e-06, |
|
"loss": 0.2406, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 3.968690222290627, |
|
"grad_norm": 10.218100547790527, |
|
"learning_rate": 6.187858666256239e-06, |
|
"loss": 0.254, |
|
"step": 180500 |
|
}, |
|
{ |
|
"epoch": 3.9796838240144234, |
|
"grad_norm": 0.7666225433349609, |
|
"learning_rate": 6.121897055913459e-06, |
|
"loss": 0.2741, |
|
"step": 181000 |
|
}, |
|
{ |
|
"epoch": 3.9906774257382205, |
|
"grad_norm": 36.5604133605957, |
|
"learning_rate": 6.055935445570678e-06, |
|
"loss": 0.2374, |
|
"step": 181500 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"eval_accuracy": 0.8765273094177246, |
|
"eval_f1": 0.8365850464842216, |
|
"eval_loss": 0.5333936214447021, |
|
"eval_mcc": 0.7376033359055921, |
|
"eval_runtime": 27.772, |
|
"eval_samples_per_second": 727.891, |
|
"eval_steps_per_second": 90.991, |
|
"step": 181924 |
|
}, |
|
{
"epoch": 4.0016710274620175,
"grad_norm": 0.4658304750919342,
"learning_rate": 5.989973835227898e-06,
"loss": 0.2447,
"step": 182000
},
{
"epoch": 4.012664629185814,
"grad_norm": 0.2597205340862274,
"learning_rate": 5.924012224885117e-06,
"loss": 0.1824,
"step": 182500
},
{
"epoch": 4.023658230909611,
"grad_norm": 0.4755733907222748,
"learning_rate": 5.858050614542337e-06,
"loss": 0.2266,
"step": 183000
},
{
"epoch": 4.034651832633408,
"grad_norm": 0.3996201753616333,
"learning_rate": 5.792089004199556e-06,
"loss": 0.1901,
"step": 183500
},
{
"epoch": 4.045645434357204,
"grad_norm": 0.8636412620544434,
"learning_rate": 5.726127393856775e-06,
"loss": 0.1955,
"step": 184000
},
{
"epoch": 4.056639036081001,
"grad_norm": 0.3265284597873688,
"learning_rate": 5.6601657835139945e-06,
"loss": 0.1979,
"step": 184500
},
{
"epoch": 4.067632637804798,
"grad_norm": 0.1188616007566452,
"learning_rate": 5.5942041731712145e-06,
"loss": 0.2066,
"step": 185000
},
{
"epoch": 4.078626239528594,
"grad_norm": 0.7546807527542114,
"learning_rate": 5.528242562828434e-06,
"loss": 0.1959,
"step": 185500
},
{
"epoch": 4.089619841252391,
"grad_norm": 0.12139397114515305,
"learning_rate": 5.462280952485654e-06,
"loss": 0.1969,
"step": 186000
},
{
"epoch": 4.100613442976188,
"grad_norm": 0.13494807481765747,
"learning_rate": 5.396319342142873e-06,
"loss": 0.1814,
"step": 186500
},
{
"epoch": 4.111607044699984,
"grad_norm": 0.023194080218672752,
"learning_rate": 5.330357731800093e-06,
"loss": 0.1877,
"step": 187000
},
{
"epoch": 4.122600646423781,
"grad_norm": 0.07898598164319992,
"learning_rate": 5.264396121457312e-06,
"loss": 0.209,
"step": 187500
},
{
"epoch": 4.1335942481475785,
"grad_norm": 0.11429109424352646,
"learning_rate": 5.198434511114532e-06,
"loss": 0.1794,
"step": 188000
},
{
"epoch": 4.144587849871375,
"grad_norm": 0.1126711368560791,
"learning_rate": 5.132472900771751e-06,
"loss": 0.2143,
"step": 188500
},
{
"epoch": 4.155581451595172,
"grad_norm": 36.669212341308594,
"learning_rate": 5.06651129042897e-06,
"loss": 0.1995,
"step": 189000
},
{
"epoch": 4.166575053318969,
"grad_norm": 0.0557066835463047,
"learning_rate": 5.0005496800861894e-06,
"loss": 0.1977,
"step": 189500
},
{
"epoch": 4.177568655042765,
"grad_norm": 0.11854979395866394,
"learning_rate": 4.9345880697434094e-06,
"loss": 0.203,
"step": 190000
},
{
"epoch": 4.188562256766562,
"grad_norm": 47.56736755371094,
"learning_rate": 4.868626459400629e-06,
"loss": 0.1673,
"step": 190500
},
{
"epoch": 4.199555858490359,
"grad_norm": 1.5440220832824707,
"learning_rate": 4.802664849057849e-06,
"loss": 0.1998,
"step": 191000
},
{
"epoch": 4.210549460214155,
"grad_norm": 0.11512400209903717,
"learning_rate": 4.736703238715068e-06,
"loss": 0.2027,
"step": 191500
},
{
"epoch": 4.221543061937952,
"grad_norm": 13.000309944152832,
"learning_rate": 4.670741628372288e-06,
"loss": 0.2268,
"step": 192000
},
{
"epoch": 4.232536663661749,
"grad_norm": 0.053511910140514374,
"learning_rate": 4.604780018029507e-06,
"loss": 0.2119,
"step": 192500
},
{
"epoch": 4.243530265385545,
"grad_norm": 1.0577130317687988,
"learning_rate": 4.538818407686727e-06,
"loss": 0.207,
"step": 193000
},
{
"epoch": 4.254523867109342,
"grad_norm": 0.5129163861274719,
"learning_rate": 4.472856797343946e-06,
"loss": 0.1971,
"step": 193500
},
{
"epoch": 4.265517468833139,
"grad_norm": 38.92678451538086,
"learning_rate": 4.406895187001165e-06,
"loss": 0.1915,
"step": 194000
},
{
"epoch": 4.276511070556936,
"grad_norm": 0.2508489489555359,
"learning_rate": 4.340933576658384e-06,
"loss": 0.2076,
"step": 194500
},
{
"epoch": 4.287504672280733,
"grad_norm": 0.8289797306060791,
"learning_rate": 4.274971966315604e-06,
"loss": 0.1906,
"step": 195000
},
{
"epoch": 4.29849827400453,
"grad_norm": 0.1511843502521515,
"learning_rate": 4.2090103559728235e-06,
"loss": 0.1908,
"step": 195500
},
{
"epoch": 4.309491875728326,
"grad_norm": 36.47195053100586,
"learning_rate": 4.1430487456300435e-06,
"loss": 0.2198,
"step": 196000
},
{
"epoch": 4.320485477452123,
"grad_norm": 59.39978790283203,
"learning_rate": 4.077087135287263e-06,
"loss": 0.1958,
"step": 196500
},
{
"epoch": 4.33147907917592,
"grad_norm": 25.194355010986328,
"learning_rate": 4.011125524944483e-06,
"loss": 0.185,
"step": 197000
},
{
"epoch": 4.342472680899716,
"grad_norm": 20.661163330078125,
"learning_rate": 3.945163914601702e-06,
"loss": 0.2032,
"step": 197500
},
{
"epoch": 4.353466282623513,
"grad_norm": 0.04815911129117012,
"learning_rate": 3.879202304258922e-06,
"loss": 0.194,
"step": 198000
},
{
"epoch": 4.36445988434731,
"grad_norm": 0.18730510771274567,
"learning_rate": 3.8132406939161414e-06,
"loss": 0.236,
"step": 198500
},
{
"epoch": 4.375453486071106,
"grad_norm": 15.972749710083008,
"learning_rate": 3.7472790835733606e-06,
"loss": 0.196,
"step": 199000
},
{
"epoch": 4.386447087794903,
"grad_norm": 0.25309285521507263,
"learning_rate": 3.68131747323058e-06,
"loss": 0.2161,
"step": 199500
},
{
"epoch": 4.3974406895187,
"grad_norm": 0.17074181139469147,
"learning_rate": 3.6153558628877997e-06,
"loss": 0.1836,
"step": 200000
},
{
"epoch": 4.4084342912424965,
"grad_norm": 20.413162231445312,
"learning_rate": 3.549394252545019e-06,
"loss": 0.1911,
"step": 200500
},
{
"epoch": 4.419427892966294,
"grad_norm": 0.5931562781333923,
"learning_rate": 3.4834326422022384e-06,
"loss": 0.1847,
"step": 201000
},
{
"epoch": 4.430421494690091,
"grad_norm": 37.10576248168945,
"learning_rate": 3.417471031859458e-06,
"loss": 0.1657,
"step": 201500
},
{
"epoch": 4.441415096413887,
"grad_norm": 0.06108024716377258,
"learning_rate": 3.3515094215166776e-06,
"loss": 0.1949,
"step": 202000
},
{
"epoch": 4.452408698137684,
"grad_norm": 14.7476224899292,
"learning_rate": 3.285547811173897e-06,
"loss": 0.2052,
"step": 202500
},
{
"epoch": 4.463402299861481,
"grad_norm": 0.4280465841293335,
"learning_rate": 3.2195862008311163e-06,
"loss": 0.2127,
"step": 203000
},
{
"epoch": 4.474395901585277,
"grad_norm": 0.38156208395957947,
"learning_rate": 3.153624590488336e-06,
"loss": 0.1949,
"step": 203500
},
{
"epoch": 4.485389503309074,
"grad_norm": 325.33026123046875,
"learning_rate": 3.0876629801455555e-06,
"loss": 0.1976,
"step": 204000
},
{
"epoch": 4.496383105032871,
"grad_norm": 99.75337982177734,
"learning_rate": 3.021701369802775e-06,
"loss": 0.2031,
"step": 204500
},
{
"epoch": 4.507376706756667,
"grad_norm": 0.17061945796012878,
"learning_rate": 2.9557397594599946e-06,
"loss": 0.1964,
"step": 205000
},
{
"epoch": 4.518370308480464,
"grad_norm": 25.07261085510254,
"learning_rate": 2.8897781491172138e-06,
"loss": 0.1875,
"step": 205500
},
{
"epoch": 4.529363910204261,
"grad_norm": 0.2692670226097107,
"learning_rate": 2.8238165387744334e-06,
"loss": 0.1978,
"step": 206000
},
{
"epoch": 4.5403575119280575,
"grad_norm": 2.56193470954895,
"learning_rate": 2.757854928431653e-06,
"loss": 0.207,
"step": 206500
},
{
"epoch": 4.5513511136518545,
"grad_norm": 1.1347905397415161,
"learning_rate": 2.6918933180888725e-06,
"loss": 0.2049,
"step": 207000
},
{
"epoch": 4.562344715375652,
"grad_norm": 0.9405034184455872,
"learning_rate": 2.625931707746092e-06,
"loss": 0.182,
"step": 207500
},
{
"epoch": 4.573338317099449,
"grad_norm": 0.10386385023593903,
"learning_rate": 2.5599700974033112e-06,
"loss": 0.1816,
"step": 208000
},
{
"epoch": 4.584331918823245,
"grad_norm": 1.0305184125900269,
"learning_rate": 2.494008487060531e-06,
"loss": 0.208,
"step": 208500
},
{
"epoch": 4.595325520547042,
"grad_norm": 5.062295913696289,
"learning_rate": 2.4280468767177504e-06,
"loss": 0.1918,
"step": 209000
},
{
"epoch": 4.606319122270838,
"grad_norm": 13.542932510375977,
"learning_rate": 2.36208526637497e-06,
"loss": 0.2016,
"step": 209500
},
{
"epoch": 4.617312723994635,
"grad_norm": 28.13912582397461,
"learning_rate": 2.2961236560321896e-06,
"loss": 0.2065,
"step": 210000
},
{
"epoch": 4.628306325718432,
"grad_norm": 38.89891052246094,
"learning_rate": 2.2301620456894087e-06,
"loss": 0.1951,
"step": 210500
},
{
"epoch": 4.639299927442229,
"grad_norm": 220.25010681152344,
"learning_rate": 2.1642004353466283e-06,
"loss": 0.2243,
"step": 211000
},
{
"epoch": 4.650293529166025,
"grad_norm": 0.11063925921916962,
"learning_rate": 2.098238825003848e-06,
"loss": 0.1862,
"step": 211500
},
{
"epoch": 4.661287130889822,
"grad_norm": 0.9656747579574585,
"learning_rate": 2.0322772146610674e-06,
"loss": 0.1796,
"step": 212000
},
{
"epoch": 4.672280732613618,
"grad_norm": 0.03588191047310829,
"learning_rate": 1.966315604318287e-06,
"loss": 0.1936,
"step": 212500
},
{
"epoch": 4.6832743343374155,
"grad_norm": 25.791149139404297,
"learning_rate": 1.9003539939755062e-06,
"loss": 0.2102,
"step": 213000
},
{
"epoch": 4.6942679360612125,
"grad_norm": 1.5398284196853638,
"learning_rate": 1.8343923836327257e-06,
"loss": 0.1941,
"step": 213500
},
{
"epoch": 4.7052615377850096,
"grad_norm": 0.8514572978019714,
"learning_rate": 1.7684307732899453e-06,
"loss": 0.2124,
"step": 214000
},
{
"epoch": 4.716255139508806,
"grad_norm": 0.1109534353017807,
"learning_rate": 1.7024691629471647e-06,
"loss": 0.1896,
"step": 214500
},
{
"epoch": 4.727248741232603,
"grad_norm": 33.977500915527344,
"learning_rate": 1.6365075526043843e-06,
"loss": 0.1868,
"step": 215000
},
{
"epoch": 4.738242342956399,
"grad_norm": 0.09221459925174713,
"learning_rate": 1.5705459422616038e-06,
"loss": 0.2147,
"step": 215500
},
{
"epoch": 4.749235944680196,
"grad_norm": 0.13753363490104675,
"learning_rate": 1.5045843319188232e-06,
"loss": 0.1805,
"step": 216000
},
{
"epoch": 4.760229546403993,
"grad_norm": 0.03300468996167183,
"learning_rate": 1.4386227215760428e-06,
"loss": 0.195,
"step": 216500
},
{
"epoch": 4.77122314812779,
"grad_norm": 52.13509750366211,
"learning_rate": 1.3726611112332624e-06,
"loss": 0.1934,
"step": 217000
},
{
"epoch": 4.782216749851586,
"grad_norm": 0.05201047658920288,
"learning_rate": 1.3066995008904817e-06,
"loss": 0.1986,
"step": 217500
},
{
"epoch": 4.793210351575383,
"grad_norm": 0.15796062350273132,
"learning_rate": 1.2407378905477013e-06,
"loss": 0.189,
"step": 218000
},
{
"epoch": 4.80420395329918,
"grad_norm": 0.603727400302887,
"learning_rate": 1.1747762802049207e-06,
"loss": 0.194,
"step": 218500
},
{
"epoch": 4.815197555022976,
"grad_norm": 19.412994384765625,
"learning_rate": 1.1088146698621402e-06,
"loss": 0.2022,
"step": 219000
},
{
"epoch": 4.8261911567467735,
"grad_norm": 0.08504907041788101,
"learning_rate": 1.0428530595193598e-06,
"loss": 0.1858,
"step": 219500
},
{
"epoch": 4.8371847584705705,
"grad_norm": 0.07863516360521317,
"learning_rate": 9.768914491765792e-07,
"loss": 0.1942,
"step": 220000
},
{
"epoch": 4.848178360194367,
"grad_norm": 23.51129722595215,
"learning_rate": 9.109298388337987e-07,
"loss": 0.1912,
"step": 220500
},
{
"epoch": 4.859171961918164,
"grad_norm": 5.780854225158691,
"learning_rate": 8.449682284910183e-07,
"loss": 0.2185,
"step": 221000
},
{
"epoch": 4.870165563641961,
"grad_norm": 0.05857408419251442,
"learning_rate": 7.790066181482378e-07,
"loss": 0.1914,
"step": 221500
},
{
"epoch": 4.881159165365757,
"grad_norm": 0.18249481916427612,
"learning_rate": 7.130450078054573e-07,
"loss": 0.1738,
"step": 222000
},
{
"epoch": 4.892152767089554,
"grad_norm": 418.6382751464844,
"learning_rate": 6.470833974626767e-07,
"loss": 0.2041,
"step": 222500
},
{
"epoch": 4.903146368813351,
"grad_norm": 0.7230046987533569,
"learning_rate": 5.811217871198962e-07,
"loss": 0.1957,
"step": 223000
},
{
"epoch": 4.914139970537147,
"grad_norm": 0.05270848050713539,
"learning_rate": 5.151601767771158e-07,
"loss": 0.1835,
"step": 223500
},
{
"epoch": 4.925133572260944,
"grad_norm": 1709.6539306640625,
"learning_rate": 4.491985664343352e-07,
"loss": 0.1921,
"step": 224000
},
{
"epoch": 4.936127173984741,
"grad_norm": 602.2431640625,
"learning_rate": 3.8323695609155474e-07,
"loss": 0.1727,
"step": 224500
},
{
"epoch": 4.947120775708537,
"grad_norm": 0.297931432723999,
"learning_rate": 3.1727534574877426e-07,
"loss": 0.1915,
"step": 225000
},
{
"epoch": 4.958114377432334,
"grad_norm": 0.3502364456653595,
"learning_rate": 2.5131373540599373e-07,
"loss": 0.1776,
"step": 225500
},
{
"epoch": 4.9691079791561314,
"grad_norm": 0.023652415722608566,
"learning_rate": 1.8535212506321323e-07,
"loss": 0.1978,
"step": 226000
},
{
"epoch": 4.980101580879928,
"grad_norm": 0.24965056777000427,
"learning_rate": 1.1939051472043273e-07,
"loss": 0.1924,
"step": 226500
},
{
"epoch": 4.991095182603725,
"grad_norm": 0.07366069406270981,
"learning_rate": 5.342890437765221e-08,
"loss": 0.1959,
"step": 227000
},
{
"epoch": 5.0,
"eval_accuracy": 0.8784565925598145,
"eval_f1": 0.8391068037456617,
"eval_loss": 0.6183628439903259,
"eval_mcc": 0.7416813202825935,
"eval_runtime": 27.9497,
"eval_samples_per_second": 723.264,
"eval_steps_per_second": 90.412,
"step": 227405
},
{
"epoch": 5.0,
"step": 227405,
"total_flos": 1.0733045580407808e+17,
"train_loss": 0.30088049875882883,
"train_runtime": 13193.5971,
"train_samples_per_second": 137.887,
"train_steps_per_second": 17.236
}
],
"logging_steps": 500,
"max_steps": 227405,
"num_input_tokens_seen": 0,
"num_train_epochs": 5,
"save_steps": 500,
"stateful_callbacks": {
"EarlyStoppingCallback": {
"args": {
"early_stopping_patience": 3,
"early_stopping_threshold": 0.001
},
"attributes": {
"early_stopping_patience_counter": 0
}
},
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.0733045580407808e+17,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}
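The JSON above matches the trainer_state.json checkpoint format written by the Hugging Face Trainer. As a minimal sketch of how such a log could be consumed (assuming the file is saved locally as trainer_state.json; the path and the summary script below are illustrative additions, not part of the original artifact):

# Minimal sketch: summarize a Hugging Face Trainer state file.
# Assumption: the JSON is saved as "trainer_state.json" (hypothetical path)
# and follows the standard trainer-state schema, where per-step logs and
# per-epoch evaluations both live in the "log_history" list.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step training entries carry "loss"; evaluation entries carry "eval_f1".
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_f1" in e]

print(f"final training loss: {train_logs[-1]['loss']}")
for e in eval_logs:
    print(
        f"epoch {e['epoch']:.0f}: "
        f"eval_f1={e['eval_f1']:.4f}, eval_accuracy={e['eval_accuracy']:.4f}"
    )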