{
  "best_metric": 1.1422123908996582,
  "best_model_checkpoint": "../res/Transformers/VIT-ASVspoof5-MFCC-Synthetic-Voice-Detection\\checkpoint-22795",
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 68385,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.021934634788330774,
      "grad_norm": 1.6002360582351685,
      "learning_rate": 4.963442275352782e-05,
      "loss": 0.196,
      "step": 500
    },
    {
      "epoch": 0.04386926957666155,
      "grad_norm": 0.0844896212220192,
      "learning_rate": 4.926884550705564e-05,
      "loss": 0.119,
      "step": 1000
    },
    {
      "epoch": 0.06580390436499232,
      "grad_norm": 0.06206677481532097,
      "learning_rate": 4.890326826058346e-05,
      "loss": 0.1112,
      "step": 1500
    },
    {
      "epoch": 0.0877385391533231,
      "grad_norm": 0.030973296612501144,
      "learning_rate": 4.853769101411129e-05,
      "loss": 0.0863,
      "step": 2000
    },
    {
      "epoch": 0.10967317394165386,
      "grad_norm": 0.01769310049712658,
      "learning_rate": 4.817211376763911e-05,
      "loss": 0.0714,
      "step": 2500
    },
    {
      "epoch": 0.13160780872998465,
      "grad_norm": 0.016247930005192757,
      "learning_rate": 4.780653652116692e-05,
      "loss": 0.0772,
      "step": 3000
    },
    {
      "epoch": 0.15354244351831542,
      "grad_norm": 0.03868141025304794,
      "learning_rate": 4.7440959274694746e-05,
      "loss": 0.0815,
      "step": 3500
    },
    {
      "epoch": 0.1754770783066462,
      "grad_norm": 0.03259115293622017,
      "learning_rate": 4.7075382028222566e-05,
      "loss": 0.0718,
      "step": 4000
    },
    {
      "epoch": 0.19741171309497696,
      "grad_norm": 0.20475254952907562,
      "learning_rate": 4.6709804781750386e-05,
      "loss": 0.0725,
      "step": 4500
    },
    {
      "epoch": 0.21934634788330773,
      "grad_norm": 0.005047565791755915,
      "learning_rate": 4.6344227535278205e-05,
      "loss": 0.0673,
      "step": 5000
    },
    {
      "epoch": 0.24128098267163853,
      "grad_norm": 0.004908708389848471,
      "learning_rate": 4.5978650288806025e-05,
      "loss": 0.0626,
      "step": 5500
    },
    {
      "epoch": 0.2632156174599693,
      "grad_norm": 0.004570414312183857,
      "learning_rate": 4.561307304233385e-05,
      "loss": 0.0643,
      "step": 6000
    },
    {
      "epoch": 0.28515025224830004,
      "grad_norm": 0.37269970774650574,
      "learning_rate": 4.5247495795861664e-05,
      "loss": 0.0623,
      "step": 6500
    },
    {
      "epoch": 0.30708488703663084,
      "grad_norm": 0.022570880129933357,
      "learning_rate": 4.488191854938949e-05,
      "loss": 0.0504,
      "step": 7000
    },
    {
      "epoch": 0.32901952182496164,
      "grad_norm": 0.06680231541395187,
      "learning_rate": 4.451634130291731e-05,
      "loss": 0.063,
      "step": 7500
    },
    {
      "epoch": 0.3509541566132924,
      "grad_norm": 0.33438870310783386,
      "learning_rate": 4.415076405644513e-05,
      "loss": 0.0709,
      "step": 8000
    },
    {
      "epoch": 0.3728887914016232,
      "grad_norm": 0.004958014003932476,
      "learning_rate": 4.378518680997295e-05,
      "loss": 0.0436,
      "step": 8500
    },
    {
      "epoch": 0.3948234261899539,
      "grad_norm": 0.03868183121085167,
      "learning_rate": 4.341960956350077e-05,
      "loss": 0.0433,
      "step": 9000
    },
    {
      "epoch": 0.4167580609782847,
      "grad_norm": 0.002795830136165023,
      "learning_rate": 4.3054032317028595e-05,
      "loss": 0.0417,
      "step": 9500
    },
    {
      "epoch": 0.43869269576661546,
      "grad_norm": 0.10845537483692169,
      "learning_rate": 4.268845507055641e-05,
      "loss": 0.0541,
      "step": 10000
    },
    {
      "epoch": 0.46062733055494626,
      "grad_norm": 0.006278611719608307,
      "learning_rate": 4.232287782408423e-05,
      "loss": 0.0441,
      "step": 10500
    },
    {
      "epoch": 0.48256196534327706,
      "grad_norm": 0.006182480603456497,
      "learning_rate": 4.1957300577612054e-05,
      "loss": 0.0428,
      "step": 11000
    },
    {
      "epoch": 0.5044966001316078,
      "grad_norm": 0.1414467990398407,
      "learning_rate": 4.159172333113987e-05,
      "loss": 0.0434,
      "step": 11500
    },
    {
      "epoch": 0.5264312349199386,
      "grad_norm": 0.008339487947523594,
      "learning_rate": 4.122614608466769e-05,
      "loss": 0.0426,
      "step": 12000
    },
    {
      "epoch": 0.5483658697082694,
      "grad_norm": 0.0023136690724641085,
      "learning_rate": 4.086056883819551e-05,
      "loss": 0.0384,
      "step": 12500
    },
    {
      "epoch": 0.5703005044966001,
      "grad_norm": 0.0026652878150343895,
      "learning_rate": 4.049499159172334e-05,
      "loss": 0.0396,
      "step": 13000
    },
    {
      "epoch": 0.5922351392849309,
      "grad_norm": 0.006547579076141119,
      "learning_rate": 4.012941434525115e-05,
      "loss": 0.0469,
      "step": 13500
    },
    {
      "epoch": 0.6141697740732617,
      "grad_norm": 0.35574808716773987,
      "learning_rate": 3.976383709877897e-05,
      "loss": 0.0433,
      "step": 14000
    },
    {
      "epoch": 0.6361044088615925,
      "grad_norm": 0.08196010440587997,
      "learning_rate": 3.93982598523068e-05,
      "loss": 0.0315,
      "step": 14500
    },
    {
      "epoch": 0.6580390436499233,
      "grad_norm": 0.006741166580468416,
      "learning_rate": 3.903268260583462e-05,
      "loss": 0.0363,
      "step": 15000
    },
    {
      "epoch": 0.679973678438254,
      "grad_norm": 0.002502752933651209,
      "learning_rate": 3.866710535936244e-05,
      "loss": 0.0397,
      "step": 15500
    },
    {
      "epoch": 0.7019083132265848,
      "grad_norm": 0.005271813366562128,
      "learning_rate": 3.8301528112890256e-05,
      "loss": 0.0339,
      "step": 16000
    },
    {
      "epoch": 0.7238429480149156,
      "grad_norm": 0.008133799768984318,
      "learning_rate": 3.7935950866418076e-05,
      "loss": 0.0382,
      "step": 16500
    },
    {
      "epoch": 0.7457775828032464,
      "grad_norm": 0.004074131604284048,
      "learning_rate": 3.7570373619945896e-05,
      "loss": 0.0311,
      "step": 17000
    },
    {
      "epoch": 0.7677122175915772,
      "grad_norm": 0.09424427896738052,
      "learning_rate": 3.7204796373473715e-05,
      "loss": 0.031,
      "step": 17500
    },
    {
      "epoch": 0.7896468523799078,
      "grad_norm": 0.0042045521549880505,
      "learning_rate": 3.683921912700154e-05,
      "loss": 0.031,
      "step": 18000
    },
    {
      "epoch": 0.8115814871682386,
      "grad_norm": 0.0019285727757960558,
      "learning_rate": 3.6473641880529354e-05,
      "loss": 0.0459,
      "step": 18500
    },
    {
      "epoch": 0.8335161219565694,
      "grad_norm": 0.0684182420372963,
      "learning_rate": 3.6108064634057174e-05,
      "loss": 0.0304,
      "step": 19000
    },
    {
      "epoch": 0.8554507567449002,
      "grad_norm": 0.0006936299032531679,
      "learning_rate": 3.5742487387585e-05,
      "loss": 0.0188,
      "step": 19500
    },
    {
      "epoch": 0.8773853915332309,
      "grad_norm": 0.001652455423027277,
      "learning_rate": 3.537691014111282e-05,
      "loss": 0.0386,
      "step": 20000
    },
    {
      "epoch": 0.8993200263215617,
      "grad_norm": 0.0018680962966755033,
      "learning_rate": 3.501133289464064e-05,
      "loss": 0.025,
      "step": 20500
    },
    {
      "epoch": 0.9212546611098925,
      "grad_norm": 0.0016410767566412687,
      "learning_rate": 3.464575564816846e-05,
      "loss": 0.032,
      "step": 21000
    },
    {
      "epoch": 0.9431892958982233,
      "grad_norm": 0.003180338069796562,
      "learning_rate": 3.428017840169628e-05,
      "loss": 0.0377,
      "step": 21500
    },
    {
      "epoch": 0.9651239306865541,
      "grad_norm": 0.6684175133705139,
      "learning_rate": 3.39146011552241e-05,
      "loss": 0.0365,
      "step": 22000
    },
    {
      "epoch": 0.9870585654748848,
      "grad_norm": 0.0036250813864171505,
      "learning_rate": 3.354902390875192e-05,
      "loss": 0.0335,
      "step": 22500
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.7655196878325647,
      "eval_f1": 0.8410950737068841,
      "eval_loss": 1.1422123908996582,
      "eval_precision": 0.8891735285147911,
      "eval_recall": 0.7979492044956941,
      "eval_runtime": 610.1257,
      "eval_samples_per_second": 231.018,
      "eval_steps_per_second": 28.878,
      "step": 22795
    },
    {
      "epoch": 1.0089932002632156,
      "grad_norm": 0.0024800270330160856,
      "learning_rate": 3.3183446662279744e-05,
      "loss": 0.0306,
      "step": 23000
    },
    {
      "epoch": 1.0309278350515463,
      "grad_norm": 0.0013444307260215282,
      "learning_rate": 3.2817869415807564e-05,
      "loss": 0.0183,
      "step": 23500
    },
    {
      "epoch": 1.0528624698398772,
      "grad_norm": 0.0013695446541532874,
      "learning_rate": 3.245229216933538e-05,
      "loss": 0.0208,
      "step": 24000
    },
    {
      "epoch": 1.0747971046282079,
      "grad_norm": 0.0007420779438689351,
      "learning_rate": 3.20867149228632e-05,
      "loss": 0.0193,
      "step": 24500
    },
    {
      "epoch": 1.0967317394165388,
      "grad_norm": 0.0015616191085428,
      "learning_rate": 3.172113767639102e-05,
      "loss": 0.0277,
      "step": 25000
    },
    {
      "epoch": 1.1186663742048695,
      "grad_norm": 0.026919562369585037,
      "learning_rate": 3.135556042991884e-05,
      "loss": 0.02,
      "step": 25500
    },
    {
      "epoch": 1.1406010089932002,
      "grad_norm": 0.0016685453010722995,
      "learning_rate": 3.098998318344666e-05,
      "loss": 0.0214,
      "step": 26000
    },
    {
      "epoch": 1.162535643781531,
      "grad_norm": 0.001420316519215703,
      "learning_rate": 3.062440593697448e-05,
      "loss": 0.0316,
      "step": 26500
    },
    {
      "epoch": 1.1844702785698618,
      "grad_norm": 0.0037766392342746258,
      "learning_rate": 3.0258828690502304e-05,
      "loss": 0.0086,
      "step": 27000
    },
    {
      "epoch": 1.2064049133581927,
      "grad_norm": 0.3518999516963959,
      "learning_rate": 2.9893251444030124e-05,
      "loss": 0.0307,
      "step": 27500
    },
    {
      "epoch": 1.2283395481465234,
      "grad_norm": 0.004498172085732222,
      "learning_rate": 2.9527674197557947e-05,
      "loss": 0.013,
      "step": 28000
    },
    {
      "epoch": 1.250274182934854,
      "grad_norm": 0.20039166510105133,
      "learning_rate": 2.9162096951085767e-05,
      "loss": 0.0237,
      "step": 28500
    },
    {
      "epoch": 1.272208817723185,
      "grad_norm": 0.009834195487201214,
      "learning_rate": 2.8796519704613583e-05,
      "loss": 0.0303,
      "step": 29000
    },
    {
      "epoch": 1.2941434525115156,
      "grad_norm": 0.012343469075858593,
      "learning_rate": 2.8430942458141406e-05,
      "loss": 0.021,
      "step": 29500
    },
    {
      "epoch": 1.3160780872998465,
      "grad_norm": 0.000986404367722571,
      "learning_rate": 2.8065365211669225e-05,
      "loss": 0.0182,
      "step": 30000
    },
    {
      "epoch": 1.3380127220881772,
      "grad_norm": 7.985368728637695,
      "learning_rate": 2.769978796519705e-05,
      "loss": 0.0166,
      "step": 30500
    },
    {
      "epoch": 1.359947356876508,
      "grad_norm": 86.72069549560547,
      "learning_rate": 2.7334210718724868e-05,
      "loss": 0.0279,
      "step": 31000
    },
    {
      "epoch": 1.3818819916648388,
      "grad_norm": 6.640265464782715,
      "learning_rate": 2.6968633472252684e-05,
      "loss": 0.0211,
      "step": 31500
    },
    {
      "epoch": 1.4038166264531695,
      "grad_norm": 0.08287691324949265,
      "learning_rate": 2.660305622578051e-05,
      "loss": 0.0208,
      "step": 32000
    },
    {
      "epoch": 1.4257512612415004,
      "grad_norm": 0.001100863330066204,
      "learning_rate": 2.6237478979308327e-05,
      "loss": 0.0186,
      "step": 32500
    },
    {
      "epoch": 1.447685896029831,
      "grad_norm": 0.0005861782701686025,
      "learning_rate": 2.587190173283615e-05,
      "loss": 0.0198,
      "step": 33000
    },
    {
      "epoch": 1.4696205308181618,
      "grad_norm": 0.01762578822672367,
      "learning_rate": 2.550632448636397e-05,
      "loss": 0.0143,
      "step": 33500
    },
    {
      "epoch": 1.4915551656064927,
      "grad_norm": 0.0006869534263387322,
      "learning_rate": 2.514074723989179e-05,
      "loss": 0.0198,
      "step": 34000
    },
    {
      "epoch": 1.5134898003948234,
      "grad_norm": 0.0006381907733157277,
      "learning_rate": 2.4775169993419612e-05,
      "loss": 0.02,
      "step": 34500
    },
    {
      "epoch": 1.5354244351831543,
      "grad_norm": 0.00030707582482136786,
      "learning_rate": 2.440959274694743e-05,
      "loss": 0.0112,
      "step": 35000
    },
    {
      "epoch": 1.557359069971485,
      "grad_norm": 0.0010392458643764257,
      "learning_rate": 2.404401550047525e-05,
      "loss": 0.0181,
      "step": 35500
    },
    {
      "epoch": 1.5792937047598157,
      "grad_norm": 0.0007690805359743536,
      "learning_rate": 2.367843825400307e-05,
      "loss": 0.0107,
      "step": 36000
    },
    {
      "epoch": 1.6012283395481464,
      "grad_norm": 0.0006547980010509491,
      "learning_rate": 2.3312861007530894e-05,
      "loss": 0.0215,
      "step": 36500
    },
    {
      "epoch": 1.6231629743364773,
      "grad_norm": 0.0005545192980207503,
      "learning_rate": 2.2947283761058713e-05,
      "loss": 0.0187,
      "step": 37000
    },
    {
      "epoch": 1.6450976091248082,
      "grad_norm": 0.02285657450556755,
      "learning_rate": 2.2581706514586533e-05,
      "loss": 0.0164,
      "step": 37500
    },
    {
      "epoch": 1.6670322439131389,
      "grad_norm": 0.005256785545498133,
      "learning_rate": 2.2216129268114352e-05,
      "loss": 0.0174,
      "step": 38000
    },
    {
      "epoch": 1.6889668787014696,
      "grad_norm": 0.006920721847563982,
      "learning_rate": 2.1850552021642172e-05,
      "loss": 0.0191,
      "step": 38500
    },
    {
      "epoch": 1.7109015134898002,
      "grad_norm": 0.00019583749235607684,
      "learning_rate": 2.1484974775169995e-05,
      "loss": 0.0115,
      "step": 39000
    },
    {
      "epoch": 1.7328361482781311,
      "grad_norm": 1.1474499702453613,
      "learning_rate": 2.1119397528697815e-05,
      "loss": 0.0122,
      "step": 39500
    },
    {
      "epoch": 1.754770783066462,
      "grad_norm": 0.00036563395406119525,
      "learning_rate": 2.0753820282225638e-05,
      "loss": 0.0044,
      "step": 40000
    },
    {
      "epoch": 1.7767054178547927,
      "grad_norm": 0.0001949958095792681,
      "learning_rate": 2.0388243035753457e-05,
      "loss": 0.0136,
      "step": 40500
    },
    {
      "epoch": 1.7986400526431234,
      "grad_norm": 0.00035594747168943286,
      "learning_rate": 2.0022665789281277e-05,
      "loss": 0.0144,
      "step": 41000
    },
    {
      "epoch": 1.8205746874314541,
      "grad_norm": 0.0013882976491004229,
      "learning_rate": 1.9657088542809096e-05,
      "loss": 0.0172,
      "step": 41500
    },
    {
      "epoch": 1.842509322219785,
      "grad_norm": 0.006421489175409079,
      "learning_rate": 1.9291511296336916e-05,
      "loss": 0.0105,
      "step": 42000
    },
    {
      "epoch": 1.864443957008116,
      "grad_norm": 0.0001415271544829011,
      "learning_rate": 1.892593404986474e-05,
      "loss": 0.0066,
      "step": 42500
    },
    {
      "epoch": 1.8863785917964466,
      "grad_norm": 0.009500663727521896,
      "learning_rate": 1.856035680339256e-05,
      "loss": 0.0206,
      "step": 43000
    },
    {
      "epoch": 1.9083132265847773,
      "grad_norm": 0.0002867148432414979,
      "learning_rate": 1.8194779556920378e-05,
      "loss": 0.0138,
      "step": 43500
    },
    {
      "epoch": 1.930247861373108,
      "grad_norm": 0.027878765016794205,
      "learning_rate": 1.7829202310448198e-05,
      "loss": 0.0075,
      "step": 44000
    },
    {
      "epoch": 1.952182496161439,
      "grad_norm": 0.01618758961558342,
      "learning_rate": 1.7463625063976017e-05,
      "loss": 0.0091,
      "step": 44500
    },
    {
      "epoch": 1.9741171309497698,
      "grad_norm": 0.00012132512347307056,
      "learning_rate": 1.709804781750384e-05,
      "loss": 0.0159,
      "step": 45000
    },
    {
      "epoch": 1.9960517657381005,
      "grad_norm": 0.07614452391862869,
      "learning_rate": 1.673247057103166e-05,
      "loss": 0.0104,
      "step": 45500
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.630138346931536,
      "eval_f1": 0.6978695783202356,
      "eval_loss": 1.9972326755523682,
      "eval_precision": 0.9567151846437425,
      "eval_recall": 0.5492628813311925,
      "eval_runtime": 543.453,
      "eval_samples_per_second": 259.36,
      "eval_steps_per_second": 32.42,
      "step": 45590
    },
    {
      "epoch": 2.017986400526431,
      "grad_norm": 0.00026925330166704953,
      "learning_rate": 1.636689332455948e-05,
      "loss": 0.0074,
      "step": 46000
    },
    {
      "epoch": 2.039921035314762,
      "grad_norm": 0.00010326378105673939,
      "learning_rate": 1.60013160780873e-05,
      "loss": 0.0013,
      "step": 46500
    },
    {
      "epoch": 2.0618556701030926,
      "grad_norm": 0.007613280788064003,
      "learning_rate": 1.5635738831615122e-05,
      "loss": 0.0094,
      "step": 47000
    },
    {
      "epoch": 2.0837903048914237,
      "grad_norm": 9.309263259638101e-05,
      "learning_rate": 1.5270161585142942e-05,
      "loss": 0.0013,
      "step": 47500
    },
    {
      "epoch": 2.1057249396797544,
      "grad_norm": 9.470985241932794e-05,
      "learning_rate": 1.4904584338670763e-05,
      "loss": 0.0097,
      "step": 48000
    },
    {
      "epoch": 2.127659574468085,
      "grad_norm": 0.0001439831539755687,
      "learning_rate": 1.4539007092198581e-05,
      "loss": 0.0038,
      "step": 48500
    },
    {
      "epoch": 2.1495942092564158,
      "grad_norm": 8.433873153990135e-05,
      "learning_rate": 1.4173429845726402e-05,
      "loss": 0.0062,
      "step": 49000
    },
    {
      "epoch": 2.1715288440447464,
      "grad_norm": 14.641709327697754,
      "learning_rate": 1.3807852599254223e-05,
      "loss": 0.0094,
      "step": 49500
    },
    {
      "epoch": 2.1934634788330776,
      "grad_norm": 0.00010519224451854825,
      "learning_rate": 1.3442275352782043e-05,
      "loss": 0.0068,
      "step": 50000
    },
    {
      "epoch": 2.2153981136214083,
      "grad_norm": 7.412636477965862e-05,
      "learning_rate": 1.3076698106309864e-05,
      "loss": 0.0081,
      "step": 50500
    },
    {
      "epoch": 2.237332748409739,
      "grad_norm": 0.00046737337834201753,
      "learning_rate": 1.2711120859837682e-05,
      "loss": 0.0064,
      "step": 51000
    },
    {
      "epoch": 2.2592673831980696,
      "grad_norm": 0.00011982124851783738,
      "learning_rate": 1.2345543613365505e-05,
      "loss": 0.0081,
      "step": 51500
    },
    {
      "epoch": 2.2812020179864003,
      "grad_norm": 0.000447772559709847,
      "learning_rate": 1.1979966366893325e-05,
      "loss": 0.0063,
      "step": 52000
    },
    {
      "epoch": 2.3031366527747315,
      "grad_norm": 8.771315333433449e-05,
      "learning_rate": 1.1614389120421146e-05,
      "loss": 0.0055,
      "step": 52500
    },
    {
      "epoch": 2.325071287563062,
      "grad_norm": 0.00023320669424720109,
      "learning_rate": 1.1248811873948966e-05,
      "loss": 0.0077,
      "step": 53000
    },
    {
      "epoch": 2.347005922351393,
      "grad_norm": 0.0011541806161403656,
      "learning_rate": 1.0883234627476787e-05,
      "loss": 0.0021,
      "step": 53500
    },
    {
      "epoch": 2.3689405571397235,
      "grad_norm": 5.727548705181107e-05,
      "learning_rate": 1.0517657381004607e-05,
      "loss": 0.0026,
      "step": 54000
    },
    {
      "epoch": 2.390875191928054,
      "grad_norm": 6.0747242969227955e-05,
      "learning_rate": 1.0152080134532426e-05,
      "loss": 0.0106,
      "step": 54500
    },
    {
      "epoch": 2.4128098267163853,
      "grad_norm": 5.967422839603387e-05,
      "learning_rate": 9.786502888060247e-06,
      "loss": 0.0055,
      "step": 55000
    },
    {
      "epoch": 2.434744461504716,
      "grad_norm": 0.0001334139669779688,
      "learning_rate": 9.420925641588069e-06,
      "loss": 0.0064,
      "step": 55500
    },
    {
      "epoch": 2.4566790962930467,
      "grad_norm": 0.0001322976895608008,
      "learning_rate": 9.055348395115888e-06,
      "loss": 0.0055,
      "step": 56000
    },
    {
      "epoch": 2.4786137310813774,
      "grad_norm": 5.676386717823334e-05,
      "learning_rate": 8.68977114864371e-06,
      "loss": 0.0017,
      "step": 56500
    },
    {
      "epoch": 2.500548365869708,
      "grad_norm": 5.234468335402198e-05,
      "learning_rate": 8.32419390217153e-06,
      "loss": 0.0021,
      "step": 57000
    },
    {
      "epoch": 2.522483000658039,
      "grad_norm": 0.0001186780136777088,
      "learning_rate": 7.95861665569935e-06,
      "loss": 0.0069,
      "step": 57500
    },
    {
      "epoch": 2.54441763544637,
      "grad_norm": 8.035052451305091e-05,
      "learning_rate": 7.593039409227171e-06,
      "loss": 0.0034,
      "step": 58000
    },
    {
      "epoch": 2.5663522702347006,
      "grad_norm": 5.069562394055538e-05,
      "learning_rate": 7.2274621627549905e-06,
      "loss": 0.0051,
      "step": 58500
    },
    {
      "epoch": 2.5882869050230313,
      "grad_norm": 4.755818372359499e-05,
      "learning_rate": 6.861884916282811e-06,
      "loss": 0.0016,
      "step": 59000
    },
    {
      "epoch": 2.610221539811362,
      "grad_norm": 0.00010292666411260143,
      "learning_rate": 6.496307669810631e-06,
      "loss": 0.001,
      "step": 59500
    },
    {
      "epoch": 2.632156174599693,
      "grad_norm": 0.00021975964773446321,
      "learning_rate": 6.130730423338452e-06,
      "loss": 0.0037,
      "step": 60000
    },
    {
      "epoch": 2.6540908093880238,
      "grad_norm": 6.505852797999978e-05,
      "learning_rate": 5.7651531768662715e-06,
      "loss": 0.0,
      "step": 60500
    },
    {
      "epoch": 2.6760254441763545,
      "grad_norm": 6.606371607631445e-05,
      "learning_rate": 5.399575930394093e-06,
      "loss": 0.0066,
      "step": 61000
    },
    {
      "epoch": 2.697960078964685,
      "grad_norm": 0.0001679928245721385,
      "learning_rate": 5.033998683921913e-06,
      "loss": 0.0,
      "step": 61500
    },
    {
      "epoch": 2.719894713753016,
      "grad_norm": 8.914883801480755e-05,
      "learning_rate": 4.668421437449734e-06,
      "loss": 0.0016,
      "step": 62000
    },
    {
      "epoch": 2.741829348541347,
      "grad_norm": 6.058572034817189e-05,
      "learning_rate": 4.302844190977553e-06,
      "loss": 0.0025,
      "step": 62500
    },
    {
      "epoch": 2.7637639833296777,
      "grad_norm": 6.485241465270519e-05,
      "learning_rate": 3.9372669445053745e-06,
      "loss": 0.0023,
      "step": 63000
    },
    {
      "epoch": 2.7856986181180083,
      "grad_norm": 7.410500984406099e-05,
      "learning_rate": 3.5716896980331945e-06,
      "loss": 0.0037,
      "step": 63500
    },
    {
      "epoch": 2.807633252906339,
      "grad_norm": 0.0005128015764057636,
      "learning_rate": 3.206112451561015e-06,
      "loss": 0.006,
      "step": 64000
    },
    {
      "epoch": 2.8295678876946697,
      "grad_norm": 4.0748513129074126e-05,
      "learning_rate": 2.8405352050888354e-06,
      "loss": 0.0001,
      "step": 64500
    },
    {
      "epoch": 2.851502522483001,
      "grad_norm": 3.7613295717164874e-05,
      "learning_rate": 2.474957958616656e-06,
      "loss": 0.0019,
      "step": 65000
    },
    {
      "epoch": 2.8734371572713315,
      "grad_norm": 0.00010410555842099711,
      "learning_rate": 2.109380712144476e-06,
      "loss": 0.0051,
      "step": 65500
    },
    {
      "epoch": 2.895371792059662,
      "grad_norm": 6.10389542998746e-05,
      "learning_rate": 1.7438034656722967e-06,
      "loss": 0.0017,
      "step": 66000
    },
    {
      "epoch": 2.917306426847993,
      "grad_norm": 8.636952406959608e-05,
      "learning_rate": 1.3782262192001172e-06,
      "loss": 0.0004,
      "step": 66500
    },
    {
      "epoch": 2.9392410616363236,
      "grad_norm": 3.7596972106257454e-05,
      "learning_rate": 1.0126489727279376e-06,
      "loss": 0.0029,
      "step": 67000
    },
    {
      "epoch": 2.9611756964246547,
      "grad_norm": 0.037345826625823975,
      "learning_rate": 6.470717262557578e-07,
      "loss": 0.0014,
      "step": 67500
    },
    {
      "epoch": 2.9831103312129854,
      "grad_norm": 8.701591286808252e-05,
      "learning_rate": 2.814944797835783e-07,
      "loss": 0.0035,
      "step": 68000
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.686952820148989,
      "eval_f1": 0.7634000386075542,
      "eval_loss": 2.8474719524383545,
      "eval_precision": 0.9259586867162704,
      "eval_recall": 0.6493942490147424,
      "eval_runtime": 536.0323,
      "eval_samples_per_second": 262.951,
      "eval_steps_per_second": 32.869,
      "step": 68385
    },
    {
      "epoch": 3.0,
      "step": 68385,
      "total_flos": 4.2393620250444464e+19,
      "train_loss": 0.02510238072675637,
      "train_runtime": 5987.0649,
      "train_samples_per_second": 91.375,
      "train_steps_per_second": 11.422
    }
  ],
  "logging_steps": 500,
  "max_steps": 68385,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4.2393620250444464e+19,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}