{
  "best_metric": 1.437897801399231,
  "best_model_checkpoint": "runs/deepseek_lora_20240422-165831/checkpoint-7500",
  "epoch": 0.1875,
  "eval_steps": 500,
  "global_step": 7500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "grad_norm": 2.7229208946228027, "learning_rate": 4.0000000000000003e-07, "loss": 2.2066, "step": 10 },
    { "epoch": 0.0, "grad_norm": 5.007352828979492, "learning_rate": 8.000000000000001e-07, "loss": 2.1122, "step": 20 },
    { "epoch": 0.0, "grad_norm": 3.8536276817321777, "learning_rate": 1.2000000000000002e-06, "loss": 2.2073, "step": 30 },
    { "epoch": 0.0, "grad_norm": 6.411635875701904, "learning_rate": 1.6000000000000001e-06, "loss": 2.3019, "step": 40 },
    { "epoch": 0.0, "grad_norm": 8.24316692352295, "learning_rate": 2.0000000000000003e-06, "loss": 2.1321, "step": 50 },
    { "epoch": 0.0, "grad_norm": 4.362759113311768, "learning_rate": 2.4000000000000003e-06, "loss": 2.1916, "step": 60 },
    { "epoch": 0.0, "grad_norm": 4.222783088684082, "learning_rate": 2.8000000000000003e-06, "loss": 2.2857, "step": 70 },
    { "epoch": 0.0, "grad_norm": 3.4690635204315186, "learning_rate": 3.2000000000000003e-06, "loss": 2.2253, "step": 80 },
    { "epoch": 0.0, "grad_norm": 4.878602981567383, "learning_rate": 3.6000000000000003e-06, "loss": 2.1578, "step": 90 },
    { "epoch": 0.0, "grad_norm": 4.750248908996582, "learning_rate": 4.000000000000001e-06, "loss": 2.0044, "step": 100 },
    { "epoch": 0.0, "grad_norm": 7.198167324066162, "learning_rate": 4.4e-06, "loss": 1.8984, "step": 110 },
    { "epoch": 0.0, "grad_norm": 3.500101327896118, "learning_rate": 4.800000000000001e-06, "loss": 1.9501, "step": 120 },
    { "epoch": 0.0, "grad_norm": 7.465259552001953, "learning_rate": 5.2e-06, "loss": 1.8413, "step": 130 },
    { "epoch": 0.0, "grad_norm": 3.6927084922790527, "learning_rate": 5.600000000000001e-06, "loss": 2.0101, "step": 140 },
    { "epoch": 0.0, "grad_norm": 8.518953323364258, "learning_rate": 6e-06, "loss": 1.8778, "step": 150 },
    { "epoch": 0.0, "grad_norm": 3.7254889011383057, "learning_rate": 6.4000000000000006e-06, "loss": 1.8361, "step": 160 },
    { "epoch": 0.0, "grad_norm": 4.634269714355469, "learning_rate": 6.800000000000001e-06, "loss": 1.6214, "step": 170 },
    { "epoch": 0.0, "grad_norm": 8.053196907043457, "learning_rate": 7.2000000000000005e-06, "loss": 1.7552, "step": 180 },
    { "epoch": 0.0, "grad_norm": 3.407431125640869, "learning_rate": 7.600000000000001e-06, "loss": 1.8447, "step": 190 },
    { "epoch": 0.01, "grad_norm": 8.015592575073242, "learning_rate": 8.000000000000001e-06, "loss": 1.736, "step": 200 },
    { "epoch": 0.01, "grad_norm": 10.115213394165039, "learning_rate": 8.400000000000001e-06, "loss": 1.5497, "step": 210 },
    { "epoch": 0.01, "grad_norm": 6.565807819366455, "learning_rate": 8.8e-06, "loss": 1.6854, "step": 220 },
    { "epoch": 0.01, "grad_norm": 4.260842800140381, "learning_rate": 9.200000000000002e-06, "loss": 1.986, "step": 230 },
    { "epoch": 0.01, "grad_norm": 4.239616394042969, "learning_rate": 9.600000000000001e-06, "loss": 1.5284, "step": 240 },
    { "epoch": 0.01, "grad_norm": 3.048999786376953, "learning_rate": 1e-05, "loss": 1.7342, "step": 250 },
    { "epoch": 0.01, "grad_norm": 3.2346177101135254, "learning_rate": 1.04e-05, "loss": 1.8604, "step": 260 },
    { "epoch": 0.01, "grad_norm": 4.236307144165039, "learning_rate": 1.0800000000000002e-05, "loss": 1.6303, "step": 270 },
    { "epoch": 0.01, "grad_norm": 3.784558057785034, "learning_rate": 1.1200000000000001e-05, "loss": 1.6309, "step": 280 },
    { "epoch": 0.01, "grad_norm": 3.2512731552124023, "learning_rate": 1.16e-05, "loss": 1.8278, "step": 290 },
    { "epoch": 0.01, "grad_norm": 4.695291996002197, "learning_rate": 1.2e-05, "loss": 1.6175, "step": 300 },
    { "epoch": 0.01, "grad_norm": 5.554133892059326, "learning_rate": 1.2400000000000002e-05, "loss": 1.6657, "step": 310 },
    { "epoch": 0.01, "grad_norm": 24.56378936767578, "learning_rate": 1.2800000000000001e-05, "loss": 1.663, "step": 320 },
    { "epoch": 0.01, "grad_norm": 2.6168453693389893, "learning_rate": 1.3200000000000002e-05, "loss": 1.811, "step": 330 },
    { "epoch": 0.01, "grad_norm": 3.714534282684326, "learning_rate": 1.3600000000000002e-05, "loss": 1.7414, "step": 340 },
    { "epoch": 0.01, "grad_norm": 4.220114231109619, "learning_rate": 1.4e-05, "loss": 1.69, "step": 350 },
    { "epoch": 0.01, "grad_norm": 2.1215527057647705, "learning_rate": 1.4400000000000001e-05, "loss": 1.6113, "step": 360 },
    { "epoch": 0.01, "grad_norm": 2.8771586418151855, "learning_rate": 1.48e-05, "loss": 1.6141, "step": 370 },
    { "epoch": 0.01, "grad_norm": 5.036810398101807, "learning_rate": 1.5200000000000002e-05, "loss": 1.7124, "step": 380 },
    { "epoch": 0.01, "grad_norm": 1.9079569578170776, "learning_rate": 1.5600000000000003e-05, "loss": 1.629, "step": 390 },
    { "epoch": 0.01, "grad_norm": 9.102452278137207, "learning_rate": 1.6000000000000003e-05, "loss": 1.659, "step": 400 },
    { "epoch": 0.01, "grad_norm": 6.5280866622924805, "learning_rate": 1.64e-05, "loss": 1.6147, "step": 410 },
    { "epoch": 0.01, "grad_norm": 6.27349853515625, "learning_rate": 1.6800000000000002e-05, "loss": 1.5316, "step": 420 },
    { "epoch": 0.01, "grad_norm": 21.586790084838867, "learning_rate": 1.72e-05, "loss": 1.7693, "step": 430 },
    { "epoch": 0.01, "grad_norm": 3.198234796524048, "learning_rate": 1.76e-05, "loss": 1.465, "step": 440 },
    { "epoch": 0.01, "grad_norm": 5.132129669189453, "learning_rate": 1.8e-05, "loss": 1.6176, "step": 450 },
    { "epoch": 0.01, "grad_norm": 5.825719833374023, "learning_rate": 1.8400000000000003e-05, "loss": 1.6984, "step": 460 },
    { "epoch": 0.01, "grad_norm": 6.724545478820801, "learning_rate": 1.88e-05, "loss": 1.7011, "step": 470 },
    { "epoch": 0.01, "grad_norm": 3.723080635070801, "learning_rate": 1.9200000000000003e-05, "loss": 1.6338, "step": 480 },
    { "epoch": 0.01, "grad_norm": 2.8718888759613037, "learning_rate": 1.9600000000000002e-05, "loss": 1.5002, "step": 490 },
    { "epoch": 0.01, "grad_norm": 11.207817077636719, "learning_rate": 2e-05, "loss": 1.6829, "step": 500 },
    { "epoch": 0.01, "eval_loss": 1.6066228151321411, "eval_runtime": 66.1997, "eval_samples_per_second": 15.106, "eval_steps_per_second": 15.106, "step": 500 },
    { "epoch": 0.01, "grad_norm": 5.459104537963867, "learning_rate": 1.999322033898305e-05, "loss": 1.6216, "step": 510 },
    { "epoch": 0.01, "grad_norm": 2.6112444400787354, "learning_rate": 1.9986440677966104e-05, "loss": 1.5761, "step": 520 },
    { "epoch": 0.01, "grad_norm": 4.285440921783447, "learning_rate": 1.9979661016949154e-05, "loss": 1.6097, "step": 530 },
    { "epoch": 0.01, "grad_norm": 2.781306505203247, "learning_rate": 1.9972881355932204e-05, "loss": 1.6967, "step": 540 },
    { "epoch": 0.01, "grad_norm": 7.55482292175293, "learning_rate": 1.9966101694915257e-05, "loss": 1.5974, "step": 550 },
    { "epoch": 0.01, "grad_norm": 5.083601474761963, "learning_rate": 1.9959322033898307e-05, "loss": 1.6287, "step": 560 },
    { "epoch": 0.01, "grad_norm": 3.7552945613861084, "learning_rate": 1.995254237288136e-05, "loss": 1.4225, "step": 570 },
    { "epoch": 0.01, "grad_norm": 3.149674892425537, "learning_rate": 1.994576271186441e-05, "loss": 1.5439, "step": 580 },
    { "epoch": 0.01, "grad_norm": 5.270935535430908, "learning_rate": 1.993898305084746e-05, "loss": 1.8439, "step": 590 },
    { "epoch": 0.01, "grad_norm": 2.552823543548584, "learning_rate": 1.9932203389830512e-05, "loss": 1.5678, "step": 600 },
    { "epoch": 0.02, "grad_norm": 2.6956074237823486, "learning_rate": 1.992542372881356e-05, "loss": 1.6002, "step": 610 },
    { "epoch": 0.02, "grad_norm": 3.0761303901672363, "learning_rate": 1.991864406779661e-05, "loss": 1.7211, "step": 620 },
    { "epoch": 0.02, "grad_norm": 3.1744227409362793, "learning_rate": 1.991186440677966e-05, "loss": 1.6327, "step": 630 },
    { "epoch": 0.02, "grad_norm": 4.774084091186523, "learning_rate": 1.990508474576271e-05, "loss": 1.6871, "step": 640 },
    { "epoch": 0.02, "grad_norm": 3.4767563343048096, "learning_rate": 1.9898305084745764e-05, "loss": 1.6038, "step": 650 },
    { "epoch": 0.02, "grad_norm": 2.3587417602539062, "learning_rate": 1.9891525423728814e-05, "loss": 1.8443, "step": 660 },
    { "epoch": 0.02, "grad_norm": 3.177602529525757, "learning_rate": 1.9884745762711867e-05, "loss": 1.5371, "step": 670 },
    { "epoch": 0.02, "grad_norm": 2.579742193222046, "learning_rate": 1.9877966101694917e-05, "loss": 1.4555, "step": 680 },
    { "epoch": 0.02, "grad_norm": 3.631253242492676, "learning_rate": 1.9871186440677966e-05, "loss": 1.6794, "step": 690 },
    { "epoch": 0.02, "grad_norm": 2.278071641921997, "learning_rate": 1.986440677966102e-05, "loss": 1.477, "step": 700 },
    { "epoch": 0.02, "grad_norm": 3.924114227294922, "learning_rate": 1.985762711864407e-05, "loss": 1.6933, "step": 710 },
    { "epoch": 0.02, "grad_norm": 3.5087924003601074, "learning_rate": 1.985084745762712e-05, "loss": 1.5713, "step": 720 },
    { "epoch": 0.02, "grad_norm": 2.0184319019317627, "learning_rate": 1.9844067796610172e-05, "loss": 1.6689, "step": 730 },
    { "epoch": 0.02, "grad_norm": 2.6397581100463867, "learning_rate": 1.9837288135593222e-05, "loss": 1.5636, "step": 740 },
    { "epoch": 0.02, "grad_norm": 3.12296986579895, "learning_rate": 1.9830508474576275e-05, "loss": 1.6867, "step": 750 },
    { "epoch": 0.02, "grad_norm": 2.04766583442688, "learning_rate": 1.9823728813559324e-05, "loss": 1.6027, "step": 760 },
    { "epoch": 0.02, "grad_norm": 1.8128042221069336, "learning_rate": 1.9816949152542374e-05, "loss": 1.6354, "step": 770 },
    { "epoch": 0.02, "grad_norm": 2.708834171295166, "learning_rate": 1.9810169491525427e-05, "loss": 1.5484, "step": 780 },
    { "epoch": 0.02, "grad_norm": 1.9796117544174194, "learning_rate": 1.9803389830508477e-05, "loss": 1.4511, "step": 790 },
    { "epoch": 0.02, "grad_norm": 4.195634365081787, "learning_rate": 1.9796610169491527e-05, "loss": 1.6435, "step": 800 },
    { "epoch": 0.02, "grad_norm": 3.132399320602417, "learning_rate": 1.978983050847458e-05, "loss": 1.5196, "step": 810 },
    { "epoch": 0.02, "grad_norm": 2.9280669689178467, "learning_rate": 1.9783050847457626e-05, "loss": 1.522, "step": 820 },
    { "epoch": 0.02, "grad_norm": 2.2013494968414307, "learning_rate": 1.977627118644068e-05, "loss": 1.6589, "step": 830 },
    { "epoch": 0.02, "grad_norm": 9.277434349060059, "learning_rate": 1.976949152542373e-05, "loss": 1.5828, "step": 840 },
    { "epoch": 0.02, "grad_norm": 6.09869384765625, "learning_rate": 1.9762711864406782e-05, "loss": 1.6683, "step": 850 },
    { "epoch": 0.02, "grad_norm": 1.7093573808670044, "learning_rate": 1.9755932203389832e-05, "loss": 1.6881, "step": 860 },
    { "epoch": 0.02, "grad_norm": 1.834920048713684, "learning_rate": 1.974915254237288e-05, "loss": 1.6169, "step": 870 },
    { "epoch": 0.02, "grad_norm": 3.802130699157715, "learning_rate": 1.9742372881355935e-05, "loss": 1.5526, "step": 880 },
    { "epoch": 0.02, "grad_norm": 2.086676836013794, "learning_rate": 1.9735593220338984e-05, "loss": 1.661, "step": 890 },
    { "epoch": 0.02, "grad_norm": 1.9825890064239502, "learning_rate": 1.9728813559322034e-05, "loss": 1.6302, "step": 900 },
    { "epoch": 0.02, "grad_norm": 1.9660803079605103, "learning_rate": 1.9722033898305087e-05, "loss": 1.6146, "step": 910 },
    { "epoch": 0.02, "grad_norm": 3.5572495460510254, "learning_rate": 1.9715254237288137e-05, "loss": 1.6143, "step": 920 },
    { "epoch": 0.02, "grad_norm": 5.440743923187256, "learning_rate": 1.970847457627119e-05, "loss": 1.4907, "step": 930 },
    { "epoch": 0.02, "grad_norm": 2.432176351547241, "learning_rate": 1.970169491525424e-05, "loss": 1.6272, "step": 940 },
    { "epoch": 0.02, "grad_norm": 1.2422198057174683, "learning_rate": 1.969491525423729e-05, "loss": 1.6404, "step": 950 },
    { "epoch": 0.02, "grad_norm": 3.0598487854003906, "learning_rate": 1.9688135593220342e-05, "loss": 1.5811, "step": 960 },
    { "epoch": 0.02, "grad_norm": 3.399864673614502, "learning_rate": 1.9681355932203392e-05, "loss": 1.6221, "step": 970 },
    { "epoch": 0.02, "grad_norm": 1.3376494646072388, "learning_rate": 1.9674576271186442e-05, "loss": 1.5384, "step": 980 },
    { "epoch": 0.02, "grad_norm": 4.062708854675293, "learning_rate": 1.9667796610169495e-05, "loss": 1.5448, "step": 990 },
    { "epoch": 0.03, "grad_norm": 4.200625419616699, "learning_rate": 1.9661016949152545e-05, "loss": 1.6656, "step": 1000 },
    { "epoch": 0.03, "eval_loss": 1.5288029909133911, "eval_runtime": 66.2062, "eval_samples_per_second": 15.104, "eval_steps_per_second": 15.104, "step": 1000 },
    { "epoch": 0.03, "grad_norm": 4.217132091522217, "learning_rate": 1.9654237288135594e-05, "loss": 1.6834, "step": 1010 },
    { "epoch": 0.03, "grad_norm": 1.8246206045150757, "learning_rate": 1.9647457627118644e-05, "loss": 1.7242, "step": 1020 },
    { "epoch": 0.03, "grad_norm": 3.0907254219055176, "learning_rate": 1.9640677966101697e-05, "loss": 1.6493, "step": 1030 },
    { "epoch": 0.03, "grad_norm": 2.1235597133636475, "learning_rate": 1.9633898305084747e-05, "loss": 1.6628, "step": 1040 },
    { "epoch": 0.03, "grad_norm": 4.815682411193848, "learning_rate": 1.9627118644067796e-05, "loss": 1.5552, "step": 1050 },
    { "epoch": 0.03, "grad_norm": 2.7466652393341064, "learning_rate": 1.962033898305085e-05, "loss": 1.7091, "step": 1060 },
    { "epoch": 0.03, "grad_norm": 1.9031518697738647, "learning_rate": 1.96135593220339e-05, "loss": 1.4593, "step": 1070 },
    { "epoch": 0.03, "grad_norm": 7.788964748382568, "learning_rate": 1.960677966101695e-05, "loss": 1.7782, "step": 1080 },
    { "epoch": 0.03, "grad_norm": 3.2768681049346924, "learning_rate": 1.9600000000000002e-05, "loss": 1.6339, "step": 1090 },
    { "epoch": 0.03, "grad_norm": 2.7264978885650635, "learning_rate": 1.9593220338983052e-05, "loss": 1.7666, "step": 1100 },
    { "epoch": 0.03, "grad_norm": 4.865339279174805, "learning_rate": 1.95864406779661e-05, "loss": 1.7127, "step": 1110 },
    { "epoch": 0.03, "grad_norm": 2.9894134998321533, "learning_rate": 1.9579661016949155e-05, "loss": 1.5671, "step": 1120 },
    { "epoch": 0.03, "grad_norm": 3.2608160972595215, "learning_rate": 1.9572881355932204e-05, "loss": 1.5396, "step": 1130 },
    { "epoch": 0.03, "grad_norm": 2.3654749393463135, "learning_rate": 1.9566101694915257e-05, "loss": 1.514, "step": 1140 },
    { "epoch": 0.03, "grad_norm": 1.9910569190979004, "learning_rate": 1.9559322033898307e-05, "loss": 1.6519, "step": 1150 },
    { "epoch": 0.03, "grad_norm": 3.1228861808776855, "learning_rate": 1.9552542372881357e-05, "loss": 1.5683, "step": 1160 },
    { "epoch": 0.03, "grad_norm": 2.8690907955169678, "learning_rate": 1.954576271186441e-05, "loss": 1.5795, "step": 1170 },
    { "epoch": 0.03, "grad_norm": 7.847595691680908, "learning_rate": 1.953898305084746e-05, "loss": 1.4718, "step": 1180 },
    { "epoch": 0.03, "grad_norm": 3.4904606342315674, "learning_rate": 1.953220338983051e-05, "loss": 1.6105, "step": 1190 },
    { "epoch": 0.03, "grad_norm": 2.352919340133667, "learning_rate": 1.9525423728813562e-05, "loss": 1.6323, "step": 1200 },
    { "epoch": 0.03, "grad_norm": 1.9066511392593384, "learning_rate": 1.9518644067796612e-05, "loss": 1.5846, "step": 1210 },
    { "epoch": 0.03, "grad_norm": 2.2066802978515625, "learning_rate": 1.9511864406779665e-05, "loss": 1.4947, "step": 1220 },
    { "epoch": 0.03, "grad_norm": 2.038661003112793, "learning_rate": 1.950508474576271e-05, "loss": 1.5167, "step": 1230 },
    { "epoch": 0.03, "grad_norm": 4.544153690338135, "learning_rate": 1.9498305084745765e-05, "loss": 1.7429, "step": 1240 },
    { "epoch": 0.03, "grad_norm": 4.264445781707764, "learning_rate": 1.9491525423728814e-05, "loss": 1.7164, "step": 1250 },
    { "epoch": 0.03, "grad_norm": 7.955377101898193, "learning_rate": 1.9484745762711864e-05, "loss": 1.4268, "step": 1260 },
    { "epoch": 0.03, "grad_norm": 3.6983351707458496, "learning_rate": 1.9477966101694917e-05, "loss": 1.5497, "step": 1270 },
    { "epoch": 0.03, "grad_norm": 2.3593084812164307, "learning_rate": 1.9471186440677967e-05, "loss": 1.4209, "step": 1280 },
    { "epoch": 0.03, "grad_norm": 5.2454938888549805, "learning_rate": 1.9464406779661017e-05, "loss": 1.7169, "step": 1290 },
    { "epoch": 0.03, "grad_norm": 4.33056116104126, "learning_rate": 1.945762711864407e-05, "loss": 1.491, "step": 1300 },
    { "epoch": 0.03, "grad_norm": 3.095102071762085, "learning_rate": 1.945084745762712e-05, "loss": 1.5366, "step": 1310 },
    { "epoch": 0.03, "grad_norm": 1.7006287574768066, "learning_rate": 1.9444067796610172e-05, "loss": 1.5085, "step": 1320 },
    { "epoch": 0.03, "grad_norm": 2.643651008605957, "learning_rate": 1.9437288135593222e-05, "loss": 1.4597, "step": 1330 },
    { "epoch": 0.03, "grad_norm": 1.9585516452789307, "learning_rate": 1.9430508474576272e-05, "loss": 1.6975, "step": 1340 },
    { "epoch": 0.03, "grad_norm": 2.669740676879883, "learning_rate": 1.9423728813559325e-05, "loss": 1.4074, "step": 1350 },
    { "epoch": 0.03, "grad_norm": 3.0379438400268555, "learning_rate": 1.9416949152542375e-05, "loss": 1.5256, "step": 1360 },
    { "epoch": 0.03, "grad_norm": 2.81781005859375, "learning_rate": 1.9410169491525424e-05, "loss": 1.5329, "step": 1370 },
    { "epoch": 0.03, "grad_norm": 1.2218108177185059, "learning_rate": 1.9403389830508477e-05, "loss": 1.5283, "step": 1380 },
    { "epoch": 0.03, "grad_norm": 2.4585816860198975, "learning_rate": 1.9396610169491527e-05, "loss": 1.4265, "step": 1390 },
    { "epoch": 0.04, "grad_norm": 6.117985725402832, "learning_rate": 1.938983050847458e-05, "loss": 1.6968, "step": 1400 },
    { "epoch": 0.04, "grad_norm": 1.7194918394088745, "learning_rate": 1.938305084745763e-05, "loss": 1.5565, "step": 1410 },
    { "epoch": 0.04, "grad_norm": 3.65537691116333, "learning_rate": 1.937627118644068e-05, "loss": 1.7526, "step": 1420 },
    { "epoch": 0.04, "grad_norm": 3.0874905586242676, "learning_rate": 1.9369491525423733e-05, "loss": 1.4787, "step": 1430 },
    { "epoch": 0.04, "grad_norm": 3.945019006729126, "learning_rate": 1.936271186440678e-05, "loss": 1.5238, "step": 1440 },
    { "epoch": 0.04, "grad_norm": 2.7200796604156494, "learning_rate": 1.9355932203389832e-05, "loss": 1.4571, "step": 1450 },
    { "epoch": 0.04, "grad_norm": 8.368704795837402, "learning_rate": 1.9349152542372882e-05, "loss": 1.5916, "step": 1460 },
    { "epoch": 0.04, "grad_norm": 3.0204453468322754, "learning_rate": 1.934237288135593e-05, "loss": 1.6997, "step": 1470 },
    { "epoch": 0.04, "grad_norm": 2.3051364421844482, "learning_rate": 1.9335593220338985e-05, "loss": 1.5315, "step": 1480 },
    { "epoch": 0.04, "grad_norm": 1.246631145477295, "learning_rate": 1.9328813559322034e-05, "loss": 1.478, "step": 1490 },
    { "epoch": 0.04, "grad_norm": 1.8742200136184692, "learning_rate": 1.9322033898305087e-05, "loss": 1.5419, "step": 1500 },
    { "epoch": 0.04, "eval_loss": 1.5387661457061768, "eval_runtime": 66.1849, "eval_samples_per_second": 15.109, "eval_steps_per_second": 15.109, "step": 1500 },
    { "epoch": 0.04, "grad_norm": 1.624708652496338, "learning_rate": 1.9315254237288137e-05, "loss": 1.5002, "step": 1510 },
    { "epoch": 0.04, "grad_norm": 5.5269856452941895, "learning_rate": 1.9308474576271187e-05, "loss": 1.6267, "step": 1520 },
    { "epoch": 0.04, "grad_norm": 1.879159688949585, "learning_rate": 1.930169491525424e-05, "loss": 1.4951, "step": 1530 },
    { "epoch": 0.04, "grad_norm": 4.727252006530762, "learning_rate": 1.929491525423729e-05, "loss": 1.5438, "step": 1540 },
    { "epoch": 0.04, "grad_norm": 1.9965201616287231, "learning_rate": 1.928813559322034e-05, "loss": 1.6525, "step": 1550 },
    { "epoch": 0.04, "grad_norm": 2.61816143989563, "learning_rate": 1.9281355932203392e-05, "loss": 1.4746, "step": 1560 },
    { "epoch": 0.04, "grad_norm": 2.8157131671905518, "learning_rate": 1.9274576271186442e-05, "loss": 1.5454, "step": 1570 },
    { "epoch": 0.04, "grad_norm": 2.738640546798706, "learning_rate": 1.9267796610169492e-05, "loss": 1.4876, "step": 1580 },
    { "epoch": 0.04, "grad_norm": 3.118277072906494, "learning_rate": 1.9261016949152545e-05, "loss": 1.5587, "step": 1590 },
    { "epoch": 0.04, "grad_norm": 6.730972766876221, "learning_rate": 1.9254237288135595e-05, "loss": 1.6752, "step": 1600 },
    { "epoch": 0.04, "grad_norm": 9.461782455444336, "learning_rate": 1.9247457627118648e-05, "loss": 1.5409, "step": 1610 },
    { "epoch": 0.04, "grad_norm": 4.566100120544434, "learning_rate": 1.9240677966101698e-05, "loss": 1.4724, "step": 1620 },
    { "epoch": 0.04, "grad_norm": 3.8972136974334717, "learning_rate": 1.9233898305084747e-05, "loss": 1.5706, "step": 1630 },
    { "epoch": 0.04, "grad_norm": 2.0403244495391846, "learning_rate": 1.92271186440678e-05, "loss": 1.6694, "step": 1640 },
    { "epoch": 0.04, "grad_norm": 3.0590033531188965, "learning_rate": 1.9220338983050847e-05, "loss": 1.5711, "step": 1650 },
    { "epoch": 0.04, "grad_norm": 0.9862588047981262, "learning_rate": 1.92135593220339e-05, "loss": 1.486, "step": 1660 },
    { "epoch": 0.04, "grad_norm": 2.9184350967407227, "learning_rate": 1.920677966101695e-05, "loss": 1.5526, "step": 1670 },
    { "epoch": 0.04, "grad_norm": 3.7024717330932617, "learning_rate": 1.9200000000000003e-05, "loss": 1.6754, "step": 1680 },
    { "epoch": 0.04, "grad_norm": 4.887876510620117, "learning_rate": 1.9193220338983052e-05, "loss": 1.6843, "step": 1690 },
    { "epoch": 0.04, "grad_norm": 2.6079001426696777, "learning_rate": 1.9186440677966102e-05, "loss": 1.5116, "step": 1700 },
    { "epoch": 0.04, "grad_norm": 3.407667875289917, "learning_rate": 1.9179661016949155e-05, "loss": 1.581, "step": 1710 },
    { "epoch": 0.04, "grad_norm": 2.201183795928955, "learning_rate": 1.9172881355932205e-05, "loss": 1.6449, "step": 1720 },
    { "epoch": 0.04, "grad_norm": 1.4738363027572632, "learning_rate": 1.9166101694915254e-05, "loss": 1.3959, "step": 1730 },
    { "epoch": 0.04, "grad_norm": 1.745976209640503, "learning_rate": 1.9159322033898308e-05, "loss": 1.394, "step": 1740 },
    { "epoch": 0.04, "grad_norm": 3.694169759750366, "learning_rate": 1.9152542372881357e-05, "loss": 1.6542, "step": 1750 },
    { "epoch": 0.04, "grad_norm": 2.3387959003448486, "learning_rate": 1.9145762711864407e-05, "loss": 1.6928, "step": 1760 },
    { "epoch": 0.04, "grad_norm": 2.8310043811798096, "learning_rate": 1.913898305084746e-05, "loss": 1.5955, "step": 1770 },
    { "epoch": 0.04, "grad_norm": 2.731762647628784, "learning_rate": 1.913220338983051e-05, "loss": 1.7997, "step": 1780 },
    { "epoch": 0.04, "grad_norm": 2.896686553955078, "learning_rate": 1.9125423728813563e-05, "loss": 1.5137, "step": 1790 },
    { "epoch": 0.04, "grad_norm": 1.8352593183517456, "learning_rate": 1.9118644067796613e-05, "loss": 1.5224, "step": 1800 },
    { "epoch": 0.05, "grad_norm": 4.474410057067871, "learning_rate": 1.9111864406779662e-05, "loss": 1.6659, "step": 1810 },
    { "epoch": 0.05, "grad_norm": 3.063828468322754, "learning_rate": 1.9105084745762715e-05, "loss": 1.4191, "step": 1820 },
    { "epoch": 0.05, "grad_norm": 1.9058163166046143, "learning_rate": 1.9098305084745765e-05, "loss": 1.5386, "step": 1830 },
    { "epoch": 0.05, "grad_norm": 2.915477991104126, "learning_rate": 1.9091525423728815e-05, "loss": 1.5756, "step": 1840 },
    { "epoch": 0.05, "grad_norm": 1.423071265220642, "learning_rate": 1.9084745762711868e-05, "loss": 1.5377, "step": 1850 },
    { "epoch": 0.05, "grad_norm": 2.5523242950439453, "learning_rate": 1.9077966101694914e-05, "loss": 1.5199, "step": 1860 },
    { "epoch": 0.05, "grad_norm": 2.609219789505005, "learning_rate": 1.9071186440677967e-05, "loss": 1.5402, "step": 1870 },
    { "epoch": 0.05, "grad_norm": 7.032884120941162, "learning_rate": 1.9064406779661017e-05, "loss": 1.4348, "step": 1880 },
    { "epoch": 0.05, "grad_norm": 3.192718029022217, "learning_rate": 1.905762711864407e-05, "loss": 1.5971, "step": 1890 },
    { "epoch": 0.05, "grad_norm": 2.0084311962127686, "learning_rate": 1.905084745762712e-05, "loss": 1.459, "step": 1900 },
    { "epoch": 0.05, "grad_norm": 1.646857738494873, "learning_rate": 1.904406779661017e-05, "loss": 1.507, "step": 1910 },
    { "epoch": 0.05, "grad_norm": 3.855058431625366, "learning_rate": 1.9037288135593223e-05, "loss": 1.5667, "step": 1920 },
    { "epoch": 0.05, "grad_norm": 1.520700216293335, "learning_rate": 1.9030508474576272e-05, "loss": 1.573, "step": 1930 },
    { "epoch": 0.05, "grad_norm": 2.6929845809936523, "learning_rate": 1.9023728813559322e-05, "loss": 1.4966, "step": 1940 },
    { "epoch": 0.05, "grad_norm": 2.283622980117798, "learning_rate": 1.9016949152542375e-05, "loss": 1.5018, "step": 1950 },
    { "epoch": 0.05, "grad_norm": 1.3686760663986206, "learning_rate": 1.9010169491525425e-05, "loss": 1.3841, "step": 1960 },
    { "epoch": 0.05, "grad_norm": 2.02280592918396, "learning_rate": 1.9003389830508478e-05, "loss": 1.3902, "step": 1970 },
    { "epoch": 0.05, "grad_norm": 2.312986135482788, "learning_rate": 1.8996610169491528e-05, "loss": 1.6016, "step": 1980 },
    { "epoch": 0.05, "grad_norm": 2.497074604034424, "learning_rate": 1.8989830508474577e-05, "loss": 1.6256, "step": 1990 },
    { "epoch": 0.05, "grad_norm": 8.351258277893066, "learning_rate": 1.898305084745763e-05, "loss": 1.5034, "step": 2000 },
    { "epoch": 0.05, "eval_loss": 1.555788516998291, "eval_runtime": 66.1337, "eval_samples_per_second": 15.121, "eval_steps_per_second": 15.121, "step": 2000 },
    { "epoch": 0.05, "grad_norm": 10.036495208740234, "learning_rate": 1.897627118644068e-05, "loss": 1.6968, "step": 2010 },
    { "epoch": 0.05, "grad_norm": 5.859747886657715, "learning_rate": 1.896949152542373e-05, "loss": 1.655, "step": 2020 },
    { "epoch": 0.05, "grad_norm": 2.809723138809204, "learning_rate": 1.8962711864406783e-05, "loss": 1.5661, "step": 2030 },
    { "epoch": 0.05, "grad_norm": 3.2033305168151855, "learning_rate": 1.8955932203389833e-05, "loss": 1.5528, "step": 2040 },
    { "epoch": 0.05, "grad_norm": 2.821791887283325, "learning_rate": 1.8949152542372882e-05, "loss": 1.5215, "step": 2050 },
    { "epoch": 0.05, "grad_norm": 3.9140360355377197, "learning_rate": 1.8942372881355932e-05, "loss": 1.6541, "step": 2060 },
    { "epoch": 0.05, "grad_norm": 1.9218617677688599, "learning_rate": 1.8935593220338985e-05, "loss": 1.6119, "step": 2070 },
    { "epoch": 0.05, "grad_norm": 1.7994463443756104, "learning_rate": 1.8928813559322035e-05, "loss": 1.5222, "step": 2080 },
    { "epoch": 0.05, "grad_norm": 2.291020631790161, "learning_rate": 1.8922033898305085e-05, "loss": 1.5315, "step": 2090 },
    { "epoch": 0.05, "grad_norm": 2.737337589263916, "learning_rate": 1.8915254237288138e-05, "loss": 1.4785, "step": 2100 },
    { "epoch": 0.05, "grad_norm": 3.1229870319366455, "learning_rate": 1.8908474576271187e-05, "loss": 1.6181, "step": 2110 },
    { "epoch": 0.05, "grad_norm": 2.7481417655944824, "learning_rate": 1.8901694915254237e-05, "loss": 1.4123, "step": 2120 },
    { "epoch": 0.05, "grad_norm": 3.3576602935791016, "learning_rate": 1.889491525423729e-05, "loss": 1.5839, "step": 2130 },
    { "epoch": 0.05, "grad_norm": 3.292433261871338, "learning_rate": 1.888813559322034e-05, "loss": 1.5668, "step": 2140 },
    { "epoch": 0.05, "grad_norm": 5.039631366729736, "learning_rate": 1.8881355932203393e-05, "loss": 1.3932, "step": 2150 },
    { "epoch": 0.05, "grad_norm": 2.142812490463257, "learning_rate": 1.8874576271186443e-05, "loss": 1.6613, "step": 2160 },
    { "epoch": 0.05, "grad_norm": 1.2970080375671387, "learning_rate": 1.8867796610169492e-05, "loss": 1.4982, "step": 2170 },
    { "epoch": 0.05, "grad_norm": 7.887810230255127, "learning_rate": 1.8861016949152545e-05, "loss": 1.6085, "step": 2180 },
    { "epoch": 0.05, "grad_norm": 3.8233675956726074, "learning_rate": 1.8854237288135595e-05, "loss": 1.668, "step": 2190 },
    { "epoch": 0.06, "grad_norm": 2.357754707336426, "learning_rate": 1.8847457627118645e-05, "loss": 1.6456, "step": 2200 },
    { "epoch": 0.06, "grad_norm": 5.471128463745117, "learning_rate": 1.8840677966101698e-05, "loss": 1.5198, "step": 2210 },
    { "epoch": 0.06, "grad_norm": 1.850195050239563, "learning_rate": 1.8833898305084748e-05, "loss": 1.5557, "step": 2220 },
    { "epoch": 0.06, "grad_norm": 2.1556105613708496, "learning_rate": 1.8827118644067797e-05, "loss": 1.6411, "step": 2230 },
    { "epoch": 0.06, "grad_norm": 3.6570167541503906, "learning_rate": 1.882033898305085e-05, "loss": 1.474, "step": 2240 },
    { "epoch": 0.06, "grad_norm": 2.989947557449341, "learning_rate": 1.88135593220339e-05, "loss": 1.5413, "step": 2250 },
    { "epoch": 0.06, "grad_norm": 1.8367825746536255, "learning_rate": 1.8806779661016953e-05, "loss": 1.5744, "step": 2260 },
    { "epoch": 0.06, "grad_norm": 2.2120065689086914, "learning_rate": 1.88e-05, "loss": 1.5589, "step": 2270 },
    { "epoch": 0.06, "grad_norm": 3.29278302192688, "learning_rate": 1.8793220338983053e-05, "loss": 1.4468, "step": 2280 },
    { "epoch": 0.06, "grad_norm": 2.6164047718048096, "learning_rate": 1.8786440677966102e-05, "loss": 1.5143, "step": 2290 },
    { "epoch": 0.06, "grad_norm": 3.008776903152466, "learning_rate": 1.8779661016949152e-05, "loss": 1.5692, "step": 2300 },
    { "epoch": 0.06, "grad_norm": 2.0924556255340576, "learning_rate": 1.8772881355932205e-05, "loss": 1.4559, "step": 2310 },
    { "epoch": 0.06, "grad_norm": 2.1632914543151855, "learning_rate": 1.8766101694915255e-05, "loss": 1.5876, "step": 2320 },
    { "epoch": 0.06, "grad_norm": 2.367845058441162, "learning_rate": 1.8759322033898305e-05, "loss": 1.5093, "step": 2330 },
    { "epoch": 0.06, "grad_norm": 2.618950843811035, "learning_rate": 1.8752542372881358e-05, "loss": 1.4839, "step": 2340 },
    { "epoch": 0.06, "grad_norm": 2.7340502738952637, "learning_rate": 1.8745762711864407e-05, "loss": 1.7354, "step": 2350 },
    { "epoch": 0.06, "grad_norm": 3.4295051097869873, "learning_rate": 1.873898305084746e-05, "loss": 1.4937, "step": 2360 },
    { "epoch": 0.06, "grad_norm": 1.078732967376709, "learning_rate": 1.873220338983051e-05, "loss": 1.7125, "step": 2370 },
    { "epoch": 0.06, "grad_norm": 3.103182554244995, "learning_rate": 1.872542372881356e-05, "loss": 1.4019, "step": 2380 },
    { "epoch": 0.06, "grad_norm": 1.760866641998291, "learning_rate": 1.8718644067796613e-05, "loss": 1.4383, "step": 2390 },
    { "epoch": 0.06, "grad_norm": 1.4431618452072144, "learning_rate": 1.8711864406779663e-05, "loss": 1.5302, "step": 2400 },
    { "epoch": 0.06, "grad_norm": 3.5087249279022217, "learning_rate": 1.8705084745762712e-05, "loss": 1.5693, "step": 2410 },
    { "epoch": 0.06, "grad_norm": 3.648737668991089, "learning_rate": 1.8698305084745765e-05, "loss": 1.2027, "step": 2420 },
    { "epoch": 0.06, "grad_norm": 4.0298357009887695, "learning_rate": 1.8691525423728815e-05, "loss": 1.4772, "step": 2430 },
    { "epoch": 0.06, "grad_norm": 4.11863899230957, "learning_rate": 1.8684745762711868e-05, "loss": 1.402, "step": 2440 },
    { "epoch": 0.06, "grad_norm": 1.1432223320007324, "learning_rate": 1.8677966101694918e-05, "loss": 1.5907, "step": 2450 },
    { "epoch": 0.06, "grad_norm": 2.0923776626586914, "learning_rate": 1.8671186440677968e-05, "loss": 1.569, "step": 2460 },
    { "epoch": 0.06, "grad_norm": 1.987396001815796, "learning_rate": 1.866440677966102e-05, "loss": 1.6504, "step": 2470 },
    { "epoch": 0.06, "grad_norm": 3.959685802459717, "learning_rate": 1.8657627118644067e-05, "loss": 1.4029, "step": 2480 },
    { "epoch": 0.06, "grad_norm": 3.1348440647125244, "learning_rate": 1.865084745762712e-05, "loss": 1.5063, "step": 2490 },
    { "epoch": 0.06, "grad_norm": 2.788222074508667, "learning_rate": 1.864406779661017e-05, "loss": 1.5971, "step": 2500 },
    { "epoch": 0.06, "eval_loss": 1.5344786643981934, "eval_runtime": 66.1387, "eval_samples_per_second": 15.12, "eval_steps_per_second": 15.12, "step": 2500 },
    { "epoch": 0.06, "grad_norm": 1.9007914066314697, "learning_rate": 1.863728813559322e-05, "loss": 1.5292, "step": 2510 },
    { "epoch": 0.06, "grad_norm": 2.1382060050964355, "learning_rate": 1.8630508474576273e-05, "loss": 1.516, "step": 2520 },
    { "epoch": 0.06, "grad_norm": 0.914665162563324, "learning_rate": 1.8623728813559322e-05, "loss": 1.3994, "step": 2530 },
    { "epoch": 0.06, "grad_norm": 4.413509368896484, "learning_rate": 1.8616949152542376e-05, "loss": 1.5696, "step": 2540 },
    { "epoch": 0.06, "grad_norm": 1.9687520265579224, "learning_rate": 1.8610169491525425e-05, "loss": 1.6693, "step": 2550 },
    { "epoch": 0.06, "grad_norm": 3.4700605869293213, "learning_rate": 1.8603389830508475e-05, "loss": 1.6217, "step": 2560 },
    { "epoch": 0.06, "grad_norm": 2.2257847785949707, "learning_rate": 1.8596610169491528e-05, "loss": 1.65, "step": 2570 },
    { "epoch": 0.06, "grad_norm": 15.683956146240234, "learning_rate": 1.8589830508474578e-05, "loss": 1.5663, "step": 2580 },
    { "epoch": 0.06, "grad_norm": 2.2584922313690186, "learning_rate": 1.8583050847457627e-05, "loss": 1.5146, "step": 2590 },
    { "epoch": 0.07, "grad_norm": 1.900083065032959, "learning_rate": 1.857627118644068e-05, "loss": 1.5062, "step": 2600 },
    { "epoch": 0.07, "grad_norm": 2.6166887283325195, "learning_rate": 1.856949152542373e-05, "loss": 1.3145, "step": 2610 },
    { "epoch": 0.07, "grad_norm": 0.9894976019859314, "learning_rate": 1.856271186440678e-05, "loss": 1.6037, "step": 2620 },
    { "epoch": 0.07, "grad_norm": 5.897536277770996, "learning_rate": 1.8555932203389833e-05, "loss": 1.5846, "step": 2630 },
    { "epoch": 0.07, "grad_norm": 3.8714261054992676, "learning_rate": 1.8549152542372883e-05, "loss": 1.5008, "step": 2640 },
    { "epoch": 0.07, "grad_norm": 3.062786817550659, "learning_rate": 1.8542372881355936e-05, "loss": 1.5885, "step": 2650 },
    { "epoch": 0.07, "grad_norm": 4.548590660095215, "learning_rate": 1.8535593220338986e-05, "loss": 1.4718, "step": 2660 },
    { "epoch": 0.07, "grad_norm": 1.9573743343353271, "learning_rate": 1.8528813559322035e-05, "loss": 1.5926, "step": 2670 },
    { "epoch": 0.07, "grad_norm": 1.7771761417388916, "learning_rate": 1.852203389830509e-05, "loss": 1.5136, "step": 2680 },
    { "epoch": 0.07, "grad_norm": 3.312812328338623, "learning_rate": 1.8515254237288135e-05, "loss": 1.4674, "step": 2690 },
    { "epoch": 0.07, "grad_norm": 2.288461685180664, "learning_rate": 1.8508474576271188e-05, "loss": 1.576, "step": 2700 },
    { "epoch": 0.07, "grad_norm": 3.608473062515259, "learning_rate": 1.8501694915254237e-05, "loss": 1.5574, "step": 2710 },
    { "epoch": 0.07, "grad_norm": 2.6018731594085693, "learning_rate": 1.849491525423729e-05, "loss": 1.473, "step": 2720 },
    { "epoch": 0.07, "grad_norm": 2.2333261966705322, "learning_rate": 1.848813559322034e-05, "loss": 1.4562, "step": 2730 },
    { "epoch": 0.07, "grad_norm": 6.693967342376709, "learning_rate": 1.848135593220339e-05, "loss": 1.433, "step": 2740 },
    { "epoch": 0.07, "grad_norm": 2.6075336933135986, "learning_rate": 1.8474576271186443e-05, "loss": 1.2995, "step": 2750 },
    { "epoch": 0.07, "grad_norm": 2.2900924682617188, "learning_rate": 1.8467796610169493e-05, "loss": 1.4313, "step": 2760 },
    { "epoch": 0.07, "grad_norm": 5.270084381103516, "learning_rate": 1.8461016949152542e-05, "loss": 1.4531, "step": 2770 },
    { "epoch": 0.07, "grad_norm": 2.3377845287323, "learning_rate": 1.8454237288135596e-05, "loss": 1.4393, "step": 2780 },
    { "epoch": 0.07, "grad_norm": 1.9369862079620361, "learning_rate": 1.8447457627118645e-05, "loss": 1.5269, "step": 2790 },
    { "epoch": 0.07, "grad_norm": 3.107175350189209, "learning_rate": 1.8440677966101695e-05, "loss": 1.4581, "step": 2800 },
    { "epoch": 0.07, "grad_norm": 2.7586755752563477, "learning_rate": 1.8433898305084748e-05, "loss": 1.4577, "step": 2810 },
    { "epoch": 0.07, "grad_norm": 1.3124182224273682, "learning_rate": 1.8427118644067798e-05, "loss": 1.6085, "step": 2820 },
    { "epoch": 0.07, "grad_norm": 1.8919298648834229, "learning_rate": 1.842033898305085e-05, "loss": 1.3827, "step": 2830 },
    { "epoch": 0.07, "grad_norm": 2.49289870262146, "learning_rate": 1.84135593220339e-05, "loss": 1.4628, "step": 2840 },
    { "epoch": 0.07, "grad_norm": 2.832827568054199, "learning_rate": 1.840677966101695e-05, "loss": 1.4701, "step": 2850 },
    { "epoch": 0.07, "grad_norm": 1.2280476093292236, "learning_rate": 1.8400000000000003e-05, "loss": 1.5576, "step": 2860 },
    { "epoch": 0.07, "grad_norm": 2.703963279724121, "learning_rate": 1.8393220338983053e-05, "loss": 1.3686, "step": 2870 },
    { "epoch": 0.07, "grad_norm": 1.554801106452942, "learning_rate": 1.8386440677966103e-05, "loss": 1.5867, "step": 2880 },
    { "epoch": 0.07, "grad_norm": 1.4219539165496826, "learning_rate": 1.8379661016949153e-05, "loss": 1.696, "step": 2890 },
    { "epoch": 0.07, "grad_norm": 7.199713706970215, "learning_rate": 1.8372881355932202e-05, "loss": 1.5041, "step": 2900 },
    { "epoch": 0.07, "grad_norm": 2.7661526203155518, "learning_rate": 1.8366101694915255e-05, "loss": 1.4472, "step": 2910 },
    { "epoch": 0.07, "grad_norm": 6.59897518157959, "learning_rate": 1.8359322033898305e-05, "loss": 1.4166, "step": 2920 },
    { "epoch": 0.07, "grad_norm": 1.927669882774353, "learning_rate": 1.8352542372881358e-05, "loss": 1.4674, "step": 2930 },
    { "epoch": 0.07, "grad_norm": 1.0190672874450684, "learning_rate": 1.8345762711864408e-05, "loss": 1.4591, "step": 2940 },
    { "epoch": 0.07, "grad_norm": 3.63718843460083, "learning_rate": 1.8338983050847458e-05, "loss": 1.7469, "step": 2950 },
    { "epoch": 0.07, "grad_norm": 1.5168763399124146, "learning_rate": 1.833220338983051e-05, "loss": 1.5691, "step": 2960 },
    { "epoch": 0.07, "grad_norm": 1.323123574256897, "learning_rate": 1.832542372881356e-05, "loss": 1.385, "step": 2970 },
    { "epoch": 0.07, "grad_norm": 3.590506076812744, "learning_rate": 1.831864406779661e-05, "loss": 1.429, "step": 2980 },
    { "epoch": 0.07, "grad_norm": 4.562034606933594, "learning_rate": 1.8311864406779663e-05, "loss": 1.4169, "step": 2990 },
    { "epoch": 0.07, "grad_norm": 3.3229966163635254, "learning_rate": 1.8305084745762713e-05, "loss": 1.5123, "step": 3000 },
    { "epoch": 0.07, "eval_loss": 1.5320124626159668, "eval_runtime": 66.1653, "eval_samples_per_second": 15.114, "eval_steps_per_second": 15.114, "step": 3000 },
    { "epoch": 0.08, "grad_norm": 3.8690502643585205, "learning_rate": 1.8298305084745766e-05, "loss": 1.2614, "step": 3010 },
    { "epoch": 0.08, "grad_norm": 2.8421173095703125, "learning_rate": 1.8291525423728816e-05, "loss": 1.3572, "step": 3020 },
    { "epoch": 0.08, "grad_norm": 3.3784098625183105, "learning_rate": 1.8284745762711865e-05, "loss": 1.4465, "step": 3030 },
    { "epoch": 0.08, "grad_norm": 3.611422061920166, "learning_rate": 1.827796610169492e-05, "loss": 1.4531, "step": 3040 },
    { "epoch": 0.08, "grad_norm": 2.3969950675964355, "learning_rate": 1.8271186440677968e-05, "loss": 1.3137, "step": 3050 },
    { "epoch": 0.08, "grad_norm": 4.92153263092041, "learning_rate": 1.8264406779661018e-05, "loss": 1.6209, "step": 3060 },
    { "epoch": 0.08, "grad_norm": 3.952179431915283, "learning_rate": 1.825762711864407e-05, "loss": 1.4923, "step": 3070 },
    { "epoch": 0.08, "grad_norm": 4.00267219543457, "learning_rate": 1.825084745762712e-05, "loss": 1.5688, "step": 3080 },
    { "epoch": 0.08, "grad_norm": 2.1664321422576904, "learning_rate": 1.824406779661017e-05, "loss": 1.4545, "step": 3090 },
    { "epoch": 0.08, "grad_norm": 3.1541237831115723, "learning_rate": 1.823728813559322e-05, "loss": 1.6623, "step": 3100 },
    { "epoch": 0.08, "grad_norm": 2.577681064605713, "learning_rate": 1.8230508474576273e-05, "loss": 1.6301, "step": 3110 },
    { "epoch": 0.08, "grad_norm": 2.2123563289642334, "learning_rate": 1.8223728813559323e-05, "loss": 1.5614, "step": 3120 },
    { "epoch": 0.08, "grad_norm": 1.7630640268325806, "learning_rate": 1.8216949152542373e-05, "loss": 1.5482, "step": 3130 },
    { "epoch": 0.08, "grad_norm": 1.5110881328582764, "learning_rate": 1.8210169491525426e-05, "loss": 1.4324, "step": 3140 },
    { "epoch": 0.08, "grad_norm": 2.9992825984954834, "learning_rate": 1.8203389830508475e-05, "loss": 1.5376, "step": 3150 },
    { "epoch": 0.08, "grad_norm": 2.825230836868286, "learning_rate": 1.8196610169491525e-05, "loss": 1.3925, "step": 3160 },
    { "epoch": 0.08, "grad_norm": 5.502832412719727, "learning_rate": 1.8189830508474578e-05, "loss": 1.5082, "step": 3170 },
    { "epoch": 0.08, "grad_norm": 1.258467197418213, "learning_rate": 1.8183050847457628e-05, "loss": 1.512, "step": 3180 },
    { "epoch": 0.08, "grad_norm": 6.260064601898193, "learning_rate": 1.817627118644068e-05, "loss": 1.5949, "step": 3190 },
    { "epoch": 0.08, "grad_norm": 1.3906792402267456, "learning_rate": 1.816949152542373e-05, "loss": 1.4887, "step": 3200 },
    { "epoch": 0.08, "grad_norm": 6.833672523498535, "learning_rate": 1.816271186440678e-05, "loss": 1.6286, "step": 3210 },
    { "epoch": 0.08, "grad_norm": 7.004729747772217, "learning_rate": 1.8155932203389833e-05, "loss": 1.6974, "step": 3220 },
    { "epoch": 0.08, "grad_norm": 3.688082695007324, "learning_rate": 1.8149152542372883e-05, "loss": 1.577, "step": 3230 },
    { "epoch": 0.08, "grad_norm": 1.2561973333358765, "learning_rate": 1.8142372881355933e-05, "loss": 1.5154, "step": 3240 },
    { "epoch": 0.08, "grad_norm": 2.1860358715057373, "learning_rate": 1.8135593220338986e-05, "loss": 1.5281, "step": 3250 },
    { "epoch": 0.08, "grad_norm": 1.4622399806976318, "learning_rate": 1.8128813559322036e-05, "loss": 1.4103, "step": 3260 },
    { "epoch": 0.08, "grad_norm": 3.036226511001587, "learning_rate": 1.8122033898305085e-05, "loss": 1.4003, "step": 3270 },
    { "epoch": 0.08, "grad_norm": 4.228930473327637, "learning_rate": 1.811525423728814e-05, "loss": 1.6889, "step": 3280 },
    { "epoch": 0.08, "grad_norm": 4.7660322189331055, "learning_rate": 1.8108474576271188e-05, "loss": 1.5185, "step": 3290 },
    { "epoch": 0.08, "grad_norm": 2.857886552810669, "learning_rate": 1.810169491525424e-05, "loss": 1.4166, "step": 3300 },
    { "epoch": 0.08, "grad_norm": 2.357570171356201, "learning_rate": 1.8094915254237288e-05, "loss": 1.6341, "step": 3310 },
    { "epoch": 0.08, "grad_norm": 2.1648855209350586, "learning_rate": 1.808813559322034e-05, "loss": 1.6001, "step": 3320 },
    { "epoch": 0.08, "grad_norm": 2.5360536575317383, "learning_rate": 1.808135593220339e-05, "loss": 1.5059, "step": 3330 },
    { "epoch": 0.08, "grad_norm": 3.741981267929077, "learning_rate": 1.807457627118644e-05, "loss": 1.5082, "step": 3340 },
    { "epoch": 0.08, "grad_norm": 3.4550766944885254, "learning_rate": 1.8067796610169493e-05, "loss": 1.6378, "step": 3350 },
    { "epoch": 0.08, "grad_norm": 1.9454740285873413, "learning_rate": 1.8061016949152543e-05, "loss": 1.6602, "step": 3360 },
    { "epoch": 0.08, "grad_norm": 3.708360195159912, "learning_rate": 1.8054237288135593e-05, "loss": 1.4584, "step": 3370 },
    { "epoch": 0.08, "grad_norm": 2.4115514755249023, "learning_rate": 1.8047457627118646e-05, "loss": 1.4053, "step": 3380 },
    { "epoch": 0.08, "grad_norm": 2.075867176055908, "learning_rate": 1.8040677966101695e-05, "loss": 1.46, "step": 3390 },
    { "epoch": 0.09, "grad_norm": 3.1672651767730713, "learning_rate": 1.803389830508475e-05, "loss": 1.4964, "step": 3400 },
    { "epoch": 0.09, "grad_norm": 4.5812578201293945, "learning_rate": 1.8027118644067798e-05, "loss": 1.4005, "step": 3410 },
    { "epoch": 0.09, "grad_norm": 2.583771228790283, "learning_rate": 1.8020338983050848e-05, "loss": 1.6024, "step": 3420 },
    { "epoch": 0.09, "grad_norm": 4.464291095733643, "learning_rate": 1.80135593220339e-05, "loss": 1.5075, "step": 3430 },
    { "epoch": 0.09, "grad_norm": 2.609657049179077, "learning_rate": 1.800677966101695e-05, "loss": 1.4527, "step": 3440 },
    { "epoch": 0.09, "grad_norm": 1.844227910041809, "learning_rate": 1.8e-05, "loss": 1.471, "step": 3450 },
    { "epoch": 0.09, "grad_norm": 3.5781702995300293, "learning_rate": 1.7993220338983054e-05, "loss": 1.5221, "step": 3460 },
    { "epoch": 0.09, "grad_norm": 1.8293837308883667, "learning_rate": 1.7986440677966103e-05, "loss": 1.4831, "step": 3470 },
    { "epoch": 0.09, "grad_norm": 5.273003101348877, "learning_rate": 1.7979661016949156e-05, "loss": 1.5131, "step": 3480 },
    { "epoch": 0.09, "grad_norm": 3.1019296646118164, "learning_rate": 1.7972881355932206e-05, "loss": 1.4346, "step": 3490 },
    { "epoch": 0.09, "grad_norm": 3.419081687927246, "learning_rate": 1.7966101694915256e-05, "loss": 1.496, "step": 3500 },
    { "epoch": 0.09, "eval_loss": 1.5301283597946167, "eval_runtime": 66.1385, "eval_samples_per_second": 15.12, "eval_steps_per_second": 15.12, "step": 3500 },
    { "epoch": 0.09, "grad_norm": 4.49799919128418, "learning_rate": 1.795932203389831e-05, "loss": 1.5304, "step": 3510 },
    { "epoch": 0.09, "grad_norm": 3.267191171646118, "learning_rate": 1.7952542372881355e-05, "loss": 1.5103, "step": 3520 },
    { "epoch": 0.09, "grad_norm": 3.4655425548553467, "learning_rate": 1.7945762711864408e-05, "loss": 1.3131, "step": 3530 },
    { "epoch": 0.09, "grad_norm": 2.289358615875244, "learning_rate": 1.7938983050847458e-05, "loss": 1.2731, "step": 3540 },
    { "epoch": 0.09, "grad_norm": 1.528286337852478, "learning_rate": 1.7932203389830508e-05, "loss": 1.6249, "step": 3550 },
    { "epoch": 0.09, "grad_norm": 1.5927760601043701, "learning_rate": 1.792542372881356e-05, "loss": 1.5177, "step": 3560 },
    { "epoch": 0.09, "grad_norm": 1.5520817041397095, "learning_rate": 1.791864406779661e-05, "loss": 1.4523, "step": 3570 },
    { "epoch": 0.09, "grad_norm": 3.531754970550537, "learning_rate": 1.7911864406779664e-05, "loss": 1.5342, "step": 3580 },
    { "epoch": 0.09, "grad_norm": 2.6139304637908936, "learning_rate": 1.7905084745762713e-05, "loss": 1.3377, "step": 3590 },
    { "epoch": 0.09, "grad_norm": 5.242190837860107, "learning_rate": 1.7898305084745763e-05, "loss": 1.592, "step": 3600 },
    { "epoch": 0.09, "grad_norm": 3.10337495803833, "learning_rate": 1.7891525423728816e-05, "loss": 1.4516, "step": 3610 },
    { "epoch": 0.09, "grad_norm": 2.615224599838257, "learning_rate": 1.7884745762711866e-05, "loss": 1.4325, "step": 3620 },
    { "epoch": 0.09, "grad_norm": 5.353131294250488, "learning_rate": 1.7877966101694916e-05, "loss": 1.5772, "step": 3630 },
    { "epoch": 0.09, "grad_norm": 1.1678059101104736, "learning_rate": 1.787118644067797e-05, "loss": 1.572, "step": 3640 },
    { "epoch": 0.09, "grad_norm": 3.2298007011413574, "learning_rate": 1.7864406779661018e-05, "loss": 1.5214, "step": 3650 },
    { "epoch": 0.09, "grad_norm": 3.405092477798462, "learning_rate": 1.785762711864407e-05, "loss": 1.4677, "step": 3660 },
    { "epoch": 0.09, "grad_norm": 3.6669983863830566, "learning_rate": 1.785084745762712e-05, "loss": 1.2853, "step": 3670 },
    { "epoch": 0.09, "grad_norm": 3.2381701469421387, "learning_rate": 1.784406779661017e-05, "loss": 1.3501, "step": 3680 },
    { "epoch": 0.09, "grad_norm": 2.3621437549591064, "learning_rate": 1.7837288135593224e-05, "loss": 1.5105, "step": 3690 },
    { "epoch": 0.09, "grad_norm": 1.1474658250808716, "learning_rate": 1.7830508474576274e-05, "loss": 1.5107, "step": 3700 },
    { "epoch": 0.09, "grad_norm": 9.769930839538574, "learning_rate": 1.7823728813559323e-05, "loss": 1.8114, "step": 3710 },
    { "epoch": 0.09, "grad_norm": 1.6212892532348633, "learning_rate": 1.7816949152542376e-05, "loss": 1.4948, "step": 3720 },
    { "epoch": 0.09, "grad_norm": 3.909820795059204, "learning_rate": 1.7810169491525423e-05, "loss": 1.4145, "step": 3730 },
    { "epoch": 0.09, "grad_norm": 1.870615005493164, "learning_rate": 1.7803389830508476e-05, "loss": 1.4856, "step": 3740 },
    { "epoch": 0.09, "grad_norm": 2.5912163257598877, "learning_rate": 1.7796610169491526e-05, "loss": 1.5021, "step": 3750 },
    { "epoch": 0.09, "grad_norm": 2.303807020187378, "learning_rate": 1.778983050847458e-05, "loss": 1.4348, "step": 3760 },
    { "epoch": 0.09, "grad_norm": 2.5953946113586426, "learning_rate": 1.778305084745763e-05, "loss": 1.4073, "step": 3770 },
    { "epoch": 0.09, "grad_norm": 2.129793643951416, "learning_rate": 1.7776271186440678e-05, "loss": 1.4827, "step": 3780 },
    { "epoch": 0.09, "grad_norm": 8.186966896057129, "learning_rate": 1.776949152542373e-05, "loss": 1.5887, "step": 3790 },
    { "epoch": 0.1, "grad_norm": 2.2956864833831787, "learning_rate": 1.776271186440678e-05, "loss": 1.4925, "step": 3800 },
    { "epoch": 0.1, "grad_norm": 1.3646732568740845, "learning_rate": 1.775593220338983e-05, "loss": 1.5814, "step": 3810 },
    { "epoch": 0.1, "grad_norm": 5.3318071365356445, "learning_rate": 1.7749152542372884e-05, "loss": 1.6139, "step": 3820 },
    { "epoch": 0.1, "grad_norm": 2.53743314743042, "learning_rate": 1.7742372881355933e-05, "loss": 1.5198, "step": 3830 },
    { "epoch": 0.1, "grad_norm": 7.048913478851318, "learning_rate": 1.7735593220338983e-05, "loss": 1.4417, "step": 3840 },
    { "epoch": 0.1, "grad_norm": 2.367314100265503, "learning_rate": 1.7728813559322036e-05, "loss": 1.4633, "step": 3850 },
    { "epoch": 0.1, "grad_norm": 1.619931697845459, "learning_rate": 1.7722033898305086e-05, "loss": 1.3719, "step": 3860 },
    { "epoch": 0.1, "grad_norm": 3.264190435409546, "learning_rate": 1.771525423728814e-05, "loss": 1.3079, "step": 3870 },
    { "epoch": 0.1, "grad_norm": 4.68599796295166, "learning_rate": 1.770847457627119e-05, "loss": 1.6278, "step": 3880 },
    { "epoch": 0.1, "grad_norm": 4.21970272064209, "learning_rate": 1.770169491525424e-05, "loss": 1.4814, "step": 3890 },
    { "epoch": 0.1, "grad_norm": 2.3647706508636475, "learning_rate": 1.769491525423729e-05, "loss": 1.5973, "step": 3900 },
    { "epoch": 0.1, "grad_norm": 3.2886738777160645, "learning_rate": 1.768813559322034e-05, "loss": 1.5224, "step": 3910 },
    { "epoch": 0.1, "grad_norm": 2.6817705631256104, "learning_rate": 1.768135593220339e-05, "loss": 1.566, "step": 3920 },
    { "epoch": 0.1, "grad_norm": 1.6243913173675537, "learning_rate": 1.767457627118644e-05, "loss": 1.6011, "step": 3930 },
    { "epoch": 0.1, "grad_norm": 1.8161165714263916, "learning_rate": 1.7667796610169494e-05, "loss": 1.5303, "step": 3940 },
    { "epoch": 0.1, "grad_norm": 2.486697196960449, "learning_rate": 1.7661016949152543e-05, "loss": 1.4032,
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.8153432607650757, |
|
"learning_rate": 1.7654237288135593e-05, |
|
"loss": 1.5167, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.186868190765381, |
|
"learning_rate": 1.7647457627118646e-05, |
|
"loss": 1.6066, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.1040332317352295, |
|
"learning_rate": 1.7640677966101696e-05, |
|
"loss": 1.4142, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.989042282104492, |
|
"learning_rate": 1.7633898305084746e-05, |
|
"loss": 1.4877, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.7532989978790283, |
|
"learning_rate": 1.76271186440678e-05, |
|
"loss": 1.4016, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"eval_loss": 1.5089428424835205, |
|
"eval_runtime": 66.2329, |
|
"eval_samples_per_second": 15.098, |
|
"eval_steps_per_second": 15.098, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 6.605846405029297, |
|
"learning_rate": 1.762033898305085e-05, |
|
"loss": 1.6163, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.1976850032806396, |
|
"learning_rate": 1.7613559322033898e-05, |
|
"loss": 1.4298, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 5.412558555603027, |
|
"learning_rate": 1.760677966101695e-05, |
|
"loss": 1.3316, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 5.241945266723633, |
|
"learning_rate": 1.76e-05, |
|
"loss": 1.6258, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 11.647382736206055, |
|
"learning_rate": 1.7593220338983054e-05, |
|
"loss": 1.5479, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.091214656829834, |
|
"learning_rate": 1.7586440677966104e-05, |
|
"loss": 1.4385, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.492511034011841, |
|
"learning_rate": 1.7579661016949153e-05, |
|
"loss": 1.6785, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.9180244207382202, |
|
"learning_rate": 1.7572881355932206e-05, |
|
"loss": 1.6234, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.256627082824707, |
|
"learning_rate": 1.7566101694915256e-05, |
|
"loss": 1.3143, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.2864432334899902, |
|
"learning_rate": 1.7559322033898306e-05, |
|
"loss": 1.4961, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 6.091422080993652, |
|
"learning_rate": 1.755254237288136e-05, |
|
"loss": 1.27, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.6744223833084106, |
|
"learning_rate": 1.754576271186441e-05, |
|
"loss": 1.6859, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.741482973098755, |
|
"learning_rate": 1.753898305084746e-05, |
|
"loss": 1.5382, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.2129416465759277, |
|
"learning_rate": 1.7532203389830508e-05, |
|
"loss": 1.548, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 1.9851702451705933, |
|
"learning_rate": 1.752542372881356e-05, |
|
"loss": 1.5777, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 2.7135250568389893, |
|
"learning_rate": 1.751864406779661e-05, |
|
"loss": 1.6374, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 6.881992340087891, |
|
"learning_rate": 1.751186440677966e-05, |
|
"loss": 1.5171, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.8673460483551025, |
|
"learning_rate": 1.7505084745762714e-05, |
|
"loss": 1.3488, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 3.7248244285583496, |
|
"learning_rate": 1.7498305084745763e-05, |
|
"loss": 1.4269, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"grad_norm": 4.191452503204346, |
|
"learning_rate": 1.7491525423728813e-05, |
|
"loss": 1.591, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.9477959871292114, |
|
"learning_rate": 1.7484745762711866e-05, |
|
"loss": 1.5194, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 4.151371002197266, |
|
"learning_rate": 1.7477966101694916e-05, |
|
"loss": 1.4644, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.285062551498413, |
|
"learning_rate": 1.747118644067797e-05, |
|
"loss": 1.3697, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.2172136306762695, |
|
"learning_rate": 1.746440677966102e-05, |
|
"loss": 1.3542, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.321295738220215, |
|
"learning_rate": 1.745762711864407e-05, |
|
"loss": 1.6265, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.2060482501983643, |
|
"learning_rate": 1.745084745762712e-05, |
|
"loss": 1.5214, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.697052240371704, |
|
"learning_rate": 1.744406779661017e-05, |
|
"loss": 1.5446, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.639937162399292, |
|
"learning_rate": 1.743728813559322e-05, |
|
"loss": 1.5329, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.5502707958221436, |
|
"learning_rate": 1.7430508474576274e-05, |
|
"loss": 1.4997, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.514580726623535, |
|
"learning_rate": 1.7423728813559324e-05, |
|
"loss": 1.4641, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.264512062072754, |
|
"learning_rate": 1.7416949152542373e-05, |
|
"loss": 1.5588, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 4.865510940551758, |
|
"learning_rate": 1.7410169491525427e-05, |
|
"loss": 1.5777, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.872544050216675, |
|
"learning_rate": 1.7403389830508476e-05, |
|
"loss": 1.4683, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.1891725063323975, |
|
"learning_rate": 1.739661016949153e-05, |
|
"loss": 1.4572, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.604435443878174, |
|
"learning_rate": 1.7389830508474576e-05, |
|
"loss": 1.3685, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.9309135675430298, |
|
"learning_rate": 1.738305084745763e-05, |
|
"loss": 1.6086, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.2651278972625732, |
|
"learning_rate": 1.737627118644068e-05, |
|
"loss": 1.5768, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.4745676517486572, |
|
"learning_rate": 1.7369491525423728e-05, |
|
"loss": 1.4553, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.904428958892822, |
|
"learning_rate": 1.736271186440678e-05, |
|
"loss": 1.654, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.6250745058059692, |
|
"learning_rate": 1.735593220338983e-05, |
|
"loss": 1.5443, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.289626121520996, |
|
"learning_rate": 1.734915254237288e-05, |
|
"loss": 1.4691, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 5.800081729888916, |
|
"learning_rate": 1.7342372881355934e-05, |
|
"loss": 1.543, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 9.145909309387207, |
|
"learning_rate": 1.7335593220338983e-05, |
|
"loss": 1.4814, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.22583270072937, |
|
"learning_rate": 1.7328813559322037e-05, |
|
"loss": 1.548, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 4.693650722503662, |
|
"learning_rate": 1.7322033898305086e-05, |
|
"loss": 1.5602, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.1424403190612793, |
|
"learning_rate": 1.7315254237288136e-05, |
|
"loss": 1.5298, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.8103983402252197, |
|
"learning_rate": 1.730847457627119e-05, |
|
"loss": 1.435, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.2972493171691895, |
|
"learning_rate": 1.730169491525424e-05, |
|
"loss": 1.4906, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.437459707260132, |
|
"learning_rate": 1.729491525423729e-05, |
|
"loss": 1.5637, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.561988115310669, |
|
"learning_rate": 1.728813559322034e-05, |
|
"loss": 1.572, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"eval_loss": 1.4887409210205078, |
|
"eval_runtime": 66.1778, |
|
"eval_samples_per_second": 15.111, |
|
"eval_steps_per_second": 15.111, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 0.4782548248767853, |
|
"learning_rate": 1.728135593220339e-05, |
|
"loss": 1.606, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 4.1626386642456055, |
|
"learning_rate": 1.7274576271186444e-05, |
|
"loss": 1.6311, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.057555675506592, |
|
"learning_rate": 1.7267796610169494e-05, |
|
"loss": 1.495, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 4.714250087738037, |
|
"learning_rate": 1.7261016949152544e-05, |
|
"loss": 1.5219, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.1815061569213867, |
|
"learning_rate": 1.7254237288135597e-05, |
|
"loss": 1.5227, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 3.602353811264038, |
|
"learning_rate": 1.7247457627118643e-05, |
|
"loss": 1.6756, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 1.7794777154922485, |
|
"learning_rate": 1.7240677966101696e-05, |
|
"loss": 1.3716, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 2.1229944229125977, |
|
"learning_rate": 1.7233898305084746e-05, |
|
"loss": 1.4612, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"grad_norm": 4.656471252441406, |
|
"learning_rate": 1.7227118644067796e-05, |
|
"loss": 1.6358, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.0882911682128906, |
|
"learning_rate": 1.722033898305085e-05, |
|
"loss": 1.5147, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.070713996887207, |
|
"learning_rate": 1.72135593220339e-05, |
|
"loss": 1.5253, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.7038753032684326, |
|
"learning_rate": 1.720677966101695e-05, |
|
"loss": 1.4375, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.5298070907592773, |
|
"learning_rate": 1.72e-05, |
|
"loss": 1.5301, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.2928740978240967, |
|
"learning_rate": 1.719322033898305e-05, |
|
"loss": 1.2571, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 5.660508155822754, |
|
"learning_rate": 1.7186440677966104e-05, |
|
"loss": 1.5168, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 5.104097366333008, |
|
"learning_rate": 1.7179661016949154e-05, |
|
"loss": 1.4134, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.954836368560791, |
|
"learning_rate": 1.7172881355932204e-05, |
|
"loss": 1.4909, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.392303466796875, |
|
"learning_rate": 1.7166101694915257e-05, |
|
"loss": 1.5639, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.9031317234039307, |
|
"learning_rate": 1.7159322033898306e-05, |
|
"loss": 1.4521, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.280984401702881, |
|
"learning_rate": 1.715254237288136e-05, |
|
"loss": 1.3877, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.2300760746002197, |
|
"learning_rate": 1.714576271186441e-05, |
|
"loss": 1.5971, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.845804452896118, |
|
"learning_rate": 1.713898305084746e-05, |
|
"loss": 1.5987, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 5.861780643463135, |
|
"learning_rate": 1.7132203389830512e-05, |
|
"loss": 1.7391, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.704226970672607, |
|
"learning_rate": 1.712542372881356e-05, |
|
"loss": 1.3629, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.362378716468811, |
|
"learning_rate": 1.711864406779661e-05, |
|
"loss": 1.5607, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.89095139503479, |
|
"learning_rate": 1.711186440677966e-05, |
|
"loss": 1.6382, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.8033549785614014, |
|
"learning_rate": 1.710508474576271e-05, |
|
"loss": 1.6113, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.045806884765625, |
|
"learning_rate": 1.7098305084745764e-05, |
|
"loss": 1.4223, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 7.401500701904297, |
|
"learning_rate": 1.7091525423728814e-05, |
|
"loss": 1.4659, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.490501880645752, |
|
"learning_rate": 1.7084745762711867e-05, |
|
"loss": 1.4543, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.137923240661621, |
|
"learning_rate": 1.7077966101694916e-05, |
|
"loss": 1.4117, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.254201889038086, |
|
"learning_rate": 1.7071186440677966e-05, |
|
"loss": 1.4404, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.1423888206481934, |
|
"learning_rate": 1.706440677966102e-05, |
|
"loss": 1.5539, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.927777647972107, |
|
"learning_rate": 1.705762711864407e-05, |
|
"loss": 1.5285, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.6136608123779297, |
|
"learning_rate": 1.705084745762712e-05, |
|
"loss": 1.3503, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.160021781921387, |
|
"learning_rate": 1.704406779661017e-05, |
|
"loss": 1.5243, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.0722756385803223, |
|
"learning_rate": 1.703728813559322e-05, |
|
"loss": 1.5306, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 6.304449558258057, |
|
"learning_rate": 1.703050847457627e-05, |
|
"loss": 1.5878, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.935084581375122, |
|
"learning_rate": 1.7023728813559324e-05, |
|
"loss": 1.5446, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.6202478408813477, |
|
"learning_rate": 1.7016949152542374e-05, |
|
"loss": 1.6461, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.2252795696258545, |
|
"learning_rate": 1.7010169491525427e-05, |
|
"loss": 1.445, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.115199565887451, |
|
"learning_rate": 1.7003389830508477e-05, |
|
"loss": 1.5513, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.852402448654175, |
|
"learning_rate": 1.6996610169491526e-05, |
|
"loss": 1.496, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 2.5148980617523193, |
|
"learning_rate": 1.698983050847458e-05, |
|
"loss": 1.4281, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.3501824140548706, |
|
"learning_rate": 1.698305084745763e-05, |
|
"loss": 1.4301, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.4159467220306396, |
|
"learning_rate": 1.697627118644068e-05, |
|
"loss": 1.5491, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 4.547379970550537, |
|
"learning_rate": 1.696949152542373e-05, |
|
"loss": 1.5137, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 3.4466910362243652, |
|
"learning_rate": 1.6962711864406782e-05, |
|
"loss": 1.4767, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 6.4336395263671875, |
|
"learning_rate": 1.695593220338983e-05, |
|
"loss": 1.5213, |
|
"step": 4990 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"grad_norm": 1.0396947860717773, |
|
"learning_rate": 1.694915254237288e-05, |
|
"loss": 1.4402, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"eval_loss": 1.4466354846954346, |
|
"eval_runtime": 66.2264, |
|
"eval_samples_per_second": 15.1, |
|
"eval_steps_per_second": 15.1, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.8836324214935303, |
|
"learning_rate": 1.6942372881355934e-05, |
|
"loss": 1.6772, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.293881893157959, |
|
"learning_rate": 1.6935593220338984e-05, |
|
"loss": 1.4618, |
|
"step": 5020 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.111931800842285, |
|
"learning_rate": 1.6928813559322034e-05, |
|
"loss": 1.5564, |
|
"step": 5030 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.7768237590789795, |
|
"learning_rate": 1.6922033898305087e-05, |
|
"loss": 1.5239, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.8081879615783691, |
|
"learning_rate": 1.6915254237288136e-05, |
|
"loss": 1.5257, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.2274467945098877, |
|
"learning_rate": 1.6908474576271186e-05, |
|
"loss": 1.3785, |
|
"step": 5060 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.4177571535110474, |
|
"learning_rate": 1.690169491525424e-05, |
|
"loss": 1.4504, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.527300834655762, |
|
"learning_rate": 1.689491525423729e-05, |
|
"loss": 1.4865, |
|
"step": 5080 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 2.1885902881622314, |
|
"learning_rate": 1.6888135593220342e-05, |
|
"loss": 1.5584, |
|
"step": 5090 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 2.6522934436798096, |
|
"learning_rate": 1.6881355932203392e-05, |
|
"loss": 1.4736, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 2.8651719093322754, |
|
"learning_rate": 1.687457627118644e-05, |
|
"loss": 1.6247, |
|
"step": 5110 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.211879253387451, |
|
"learning_rate": 1.6867796610169495e-05, |
|
"loss": 1.4822, |
|
"step": 5120 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.725778579711914, |
|
"learning_rate": 1.6861016949152544e-05, |
|
"loss": 1.6364, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 2.7316319942474365, |
|
"learning_rate": 1.6854237288135594e-05, |
|
"loss": 1.4218, |
|
"step": 5140 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.2881827354431152, |
|
"learning_rate": 1.6847457627118647e-05, |
|
"loss": 1.4729, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.106601238250732, |
|
"learning_rate": 1.6840677966101697e-05, |
|
"loss": 1.5303, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.9981019496917725, |
|
"learning_rate": 1.683389830508475e-05, |
|
"loss": 1.4583, |
|
"step": 5170 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 2.5599071979522705, |
|
"learning_rate": 1.6827118644067796e-05, |
|
"loss": 1.5817, |
|
"step": 5180 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.673508644104004, |
|
"learning_rate": 1.682033898305085e-05, |
|
"loss": 1.4967, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 7.4875922203063965, |
|
"learning_rate": 1.68135593220339e-05, |
|
"loss": 1.4482, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 5.208539962768555, |
|
"learning_rate": 1.680677966101695e-05, |
|
"loss": 1.5031, |
|
"step": 5210 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 5.5398664474487305, |
|
"learning_rate": 1.6800000000000002e-05, |
|
"loss": 1.3661, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.9544267654418945, |
|
"learning_rate": 1.679322033898305e-05, |
|
"loss": 1.599, |
|
"step": 5230 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.121668815612793, |
|
"learning_rate": 1.67864406779661e-05, |
|
"loss": 1.4801, |
|
"step": 5240 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.368370532989502, |
|
"learning_rate": 1.6779661016949154e-05, |
|
"loss": 1.5737, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.03584098815918, |
|
"learning_rate": 1.6772881355932204e-05, |
|
"loss": 1.4995, |
|
"step": 5260 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 5.6201090812683105, |
|
"learning_rate": 1.6766101694915257e-05, |
|
"loss": 1.3567, |
|
"step": 5270 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.009582996368408, |
|
"learning_rate": 1.6759322033898307e-05, |
|
"loss": 1.4241, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.4552884101867676, |
|
"learning_rate": 1.6752542372881357e-05, |
|
"loss": 1.3472, |
|
"step": 5290 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 4.6100029945373535, |
|
"learning_rate": 1.674576271186441e-05, |
|
"loss": 1.3696, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 6.752505779266357, |
|
"learning_rate": 1.673898305084746e-05, |
|
"loss": 1.5192, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.4629709720611572, |
|
"learning_rate": 1.673220338983051e-05, |
|
"loss": 1.5774, |
|
"step": 5320 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 2.904249906539917, |
|
"learning_rate": 1.6725423728813562e-05, |
|
"loss": 1.4843, |
|
"step": 5330 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.3308334350585938, |
|
"learning_rate": 1.6718644067796612e-05, |
|
"loss": 1.6006, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 2.405097484588623, |
|
"learning_rate": 1.671186440677966e-05, |
|
"loss": 1.5736, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 3.4328196048736572, |
|
"learning_rate": 1.6705084745762715e-05, |
|
"loss": 1.4613, |
|
"step": 5360 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.5162967443466187, |
|
"learning_rate": 1.6698305084745764e-05, |
|
"loss": 1.4562, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 2.473905563354492, |
|
"learning_rate": 1.6691525423728817e-05, |
|
"loss": 1.4899, |
|
"step": 5380 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"grad_norm": 1.3600870370864868, |
|
"learning_rate": 1.6684745762711864e-05, |
|
"loss": 1.4197, |
|
"step": 5390 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.9598913192749023, |
|
"learning_rate": 1.6677966101694917e-05, |
|
"loss": 1.4268, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.6608197689056396, |
|
"learning_rate": 1.6671186440677967e-05, |
|
"loss": 1.2926, |
|
"step": 5410 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.557682991027832, |
|
"learning_rate": 1.6664406779661016e-05, |
|
"loss": 1.6283, |
|
"step": 5420 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.969449281692505, |
|
"learning_rate": 1.665762711864407e-05, |
|
"loss": 1.4047, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.7862861156463623, |
|
"learning_rate": 1.665084745762712e-05, |
|
"loss": 1.4233, |
|
"step": 5440 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.7756729125976562, |
|
"learning_rate": 1.6644067796610172e-05, |
|
"loss": 1.4485, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.082773447036743, |
|
"learning_rate": 1.6637288135593222e-05, |
|
"loss": 1.3878, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.100788116455078, |
|
"learning_rate": 1.663050847457627e-05, |
|
"loss": 1.5481, |
|
"step": 5470 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.218986749649048, |
|
"learning_rate": 1.6623728813559325e-05, |
|
"loss": 1.3906, |
|
"step": 5480 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.727528691291809, |
|
"learning_rate": 1.6616949152542374e-05, |
|
"loss": 1.4824, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 4.321578502655029, |
|
"learning_rate": 1.6610169491525424e-05, |
|
"loss": 1.5586, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"eval_loss": 1.469241976737976, |
|
"eval_runtime": 66.1281, |
|
"eval_samples_per_second": 15.122, |
|
"eval_steps_per_second": 15.122, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.7050514221191406, |
|
"learning_rate": 1.6603389830508477e-05, |
|
"loss": 1.2704, |
|
"step": 5510 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.4852951765060425, |
|
"learning_rate": 1.6596610169491527e-05, |
|
"loss": 1.6239, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.524787664413452, |
|
"learning_rate": 1.6589830508474577e-05, |
|
"loss": 1.45, |
|
"step": 5530 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.804272174835205, |
|
"learning_rate": 1.658305084745763e-05, |
|
"loss": 1.4039, |
|
"step": 5540 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.2891910076141357, |
|
"learning_rate": 1.657627118644068e-05, |
|
"loss": 1.3969, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.775587797164917, |
|
"learning_rate": 1.6569491525423732e-05, |
|
"loss": 1.5571, |
|
"step": 5560 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 4.006053924560547, |
|
"learning_rate": 1.6562711864406782e-05, |
|
"loss": 1.4743, |
|
"step": 5570 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.258488416671753, |
|
"learning_rate": 1.6555932203389832e-05, |
|
"loss": 1.3094, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.469382405281067, |
|
"learning_rate": 1.654915254237288e-05, |
|
"loss": 1.4247, |
|
"step": 5590 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.234476089477539, |
|
"learning_rate": 1.654237288135593e-05, |
|
"loss": 1.4024, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 4.297300338745117, |
|
"learning_rate": 1.6535593220338984e-05, |
|
"loss": 1.3422, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.316805362701416, |
|
"learning_rate": 1.6528813559322034e-05, |
|
"loss": 1.4723, |
|
"step": 5620 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.859321117401123, |
|
"learning_rate": 1.6522033898305084e-05, |
|
"loss": 1.4444, |
|
"step": 5630 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 5.158230304718018, |
|
"learning_rate": 1.6515254237288137e-05, |
|
"loss": 1.6279, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.160407781600952, |
|
"learning_rate": 1.6508474576271187e-05, |
|
"loss": 1.4487, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.0725409984588623, |
|
"learning_rate": 1.650169491525424e-05, |
|
"loss": 1.4906, |
|
"step": 5660 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.5659565925598145, |
|
"learning_rate": 1.649491525423729e-05, |
|
"loss": 1.4545, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 8.198447227478027, |
|
"learning_rate": 1.648813559322034e-05, |
|
"loss": 1.5087, |
|
"step": 5680 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.8482344150543213, |
|
"learning_rate": 1.6481355932203392e-05, |
|
"loss": 1.5596, |
|
"step": 5690 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.828826665878296, |
|
"learning_rate": 1.6474576271186442e-05, |
|
"loss": 1.5452, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 9.930662155151367, |
|
"learning_rate": 1.646779661016949e-05, |
|
"loss": 1.4667, |
|
"step": 5710 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.9717953205108643, |
|
"learning_rate": 1.6461016949152545e-05, |
|
"loss": 1.5651, |
|
"step": 5720 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.6129778623580933, |
|
"learning_rate": 1.6454237288135594e-05, |
|
"loss": 1.3477, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 2.341064214706421, |
|
"learning_rate": 1.6447457627118648e-05, |
|
"loss": 1.5159, |
|
"step": 5740 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 4.6363205909729, |
|
"learning_rate": 1.6440677966101697e-05, |
|
"loss": 1.5524, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 4.433897495269775, |
|
"learning_rate": 1.6433898305084747e-05, |
|
"loss": 1.46, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.2978014945983887, |
|
"learning_rate": 1.64271186440678e-05, |
|
"loss": 1.5847, |
|
"step": 5770 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 3.325284004211426, |
|
"learning_rate": 1.642033898305085e-05, |
|
"loss": 1.3553, |
|
"step": 5780 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.9704495668411255, |
|
"learning_rate": 1.64135593220339e-05, |
|
"loss": 1.3661, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"grad_norm": 1.6186383962631226, |
|
"learning_rate": 1.640677966101695e-05, |
|
"loss": 1.5538, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.1665966510772705, |
|
"learning_rate": 1.64e-05, |
|
"loss": 1.5188, |
|
"step": 5810 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 7.165323734283447, |
|
"learning_rate": 1.6393220338983052e-05, |
|
"loss": 1.627, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 7.434396743774414, |
|
"learning_rate": 1.63864406779661e-05, |
|
"loss": 1.4751, |
|
"step": 5830 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.549182176589966, |
|
"learning_rate": 1.6379661016949155e-05, |
|
"loss": 1.5013, |
|
"step": 5840 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.3907253742218018, |
|
"learning_rate": 1.6372881355932204e-05, |
|
"loss": 1.5532, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.3262014389038086, |
|
"learning_rate": 1.6366101694915254e-05, |
|
"loss": 1.4404, |
|
"step": 5860 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.387146234512329, |
|
"learning_rate": 1.6359322033898307e-05, |
|
"loss": 1.4057, |
|
"step": 5870 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 4.461369514465332, |
|
"learning_rate": 1.6352542372881357e-05, |
|
"loss": 1.4003, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.3152384757995605, |
|
"learning_rate": 1.6345762711864407e-05, |
|
"loss": 1.6658, |
|
"step": 5890 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 3.839587450027466, |
|
"learning_rate": 1.633898305084746e-05, |
|
"loss": 1.6396, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.5456769466400146, |
|
"learning_rate": 1.633220338983051e-05, |
|
"loss": 1.4543, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.218180775642395, |
|
"learning_rate": 1.6325423728813563e-05, |
|
"loss": 1.6549, |
|
"step": 5920 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 3.9048516750335693, |
|
"learning_rate": 1.6318644067796612e-05, |
|
"loss": 1.4558, |
|
"step": 5930 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.0440735816955566, |
|
"learning_rate": 1.6311864406779662e-05, |
|
"loss": 1.392, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.26229190826416, |
|
"learning_rate": 1.6305084745762715e-05, |
|
"loss": 1.5081, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.3289761543273926, |
|
"learning_rate": 1.6298305084745765e-05, |
|
"loss": 1.3642, |
|
"step": 5960 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.584726333618164, |
|
"learning_rate": 1.6291525423728814e-05, |
|
"loss": 1.3108, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 3.9245219230651855, |
|
"learning_rate": 1.6284745762711868e-05, |
|
"loss": 1.5209, |
|
"step": 5980 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.5690237283706665, |
|
"learning_rate": 1.6277966101694917e-05, |
|
"loss": 1.4554, |
|
"step": 5990 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.7308597564697266, |
|
"learning_rate": 1.6271186440677967e-05, |
|
"loss": 1.3758, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"eval_loss": 1.4649540185928345, |
|
"eval_runtime": 66.131, |
|
"eval_samples_per_second": 15.122, |
|
"eval_steps_per_second": 15.122, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 3.45599627494812, |
|
"learning_rate": 1.6264406779661017e-05, |
|
"loss": 1.4816, |
|
"step": 6010 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.358900785446167, |
|
"learning_rate": 1.625762711864407e-05, |
|
"loss": 1.5672, |
|
"step": 6020 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.577291488647461, |
|
"learning_rate": 1.625084745762712e-05, |
|
"loss": 1.536, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.5293314456939697, |
|
"learning_rate": 1.624406779661017e-05, |
|
"loss": 1.3852, |
|
"step": 6040 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 3.6409552097320557, |
|
"learning_rate": 1.6237288135593222e-05, |
|
"loss": 1.3703, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.245363712310791, |
|
"learning_rate": 1.6230508474576272e-05, |
|
"loss": 1.4489, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 3.5935614109039307, |
|
"learning_rate": 1.6223728813559322e-05, |
|
"loss": 1.3177, |
|
"step": 6070 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 3.89214825630188, |
|
"learning_rate": 1.6216949152542375e-05, |
|
"loss": 1.6123, |
|
"step": 6080 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.6698529720306396, |
|
"learning_rate": 1.6210169491525424e-05, |
|
"loss": 1.4598, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 4.818114757537842, |
|
"learning_rate": 1.6203389830508474e-05, |
|
"loss": 1.3712, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 5.041522979736328, |
|
"learning_rate": 1.6196610169491527e-05, |
|
"loss": 1.5556, |
|
"step": 6110 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.3514511585235596, |
|
"learning_rate": 1.6189830508474577e-05, |
|
"loss": 1.5359, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.9724153280258179, |
|
"learning_rate": 1.618305084745763e-05, |
|
"loss": 1.5191, |
|
"step": 6130 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 3.0398383140563965, |
|
"learning_rate": 1.617627118644068e-05, |
|
"loss": 1.4751, |
|
"step": 6140 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.3548771142959595, |
|
"learning_rate": 1.616949152542373e-05, |
|
"loss": 1.4159, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 3.616636276245117, |
|
"learning_rate": 1.6162711864406783e-05, |
|
"loss": 1.4667, |
|
"step": 6160 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 3.348968744277954, |
|
"learning_rate": 1.6155932203389832e-05, |
|
"loss": 1.4056, |
|
"step": 6170 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.5771777629852295, |
|
"learning_rate": 1.6149152542372882e-05, |
|
"loss": 1.4256, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 3.0773298740386963, |
|
"learning_rate": 1.6142372881355935e-05, |
|
"loss": 1.2936, |
|
"step": 6190 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.114321708679199, |
|
"learning_rate": 1.6135593220338985e-05, |
|
"loss": 1.5362, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 3.142282485961914, |
|
"learning_rate": 1.6128813559322038e-05, |
|
"loss": 1.506, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.1931562423706055, |
|
"learning_rate": 1.6122033898305084e-05, |
|
"loss": 1.581, |
|
"step": 6220 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 3.0442159175872803, |
|
"learning_rate": 1.6115254237288137e-05, |
|
"loss": 1.4553, |
|
"step": 6230 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 4.841310501098633, |
|
"learning_rate": 1.6108474576271187e-05, |
|
"loss": 1.5559, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 4.651193141937256, |
|
"learning_rate": 1.6101694915254237e-05, |
|
"loss": 1.3535, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 3.571608543395996, |
|
"learning_rate": 1.609491525423729e-05, |
|
"loss": 1.5062, |
|
"step": 6260 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 6.008980751037598, |
|
"learning_rate": 1.608813559322034e-05, |
|
"loss": 1.4696, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 11.914450645446777, |
|
"learning_rate": 1.608135593220339e-05, |
|
"loss": 1.4643, |
|
"step": 6280 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.8239995241165161, |
|
"learning_rate": 1.6074576271186442e-05, |
|
"loss": 1.6175, |
|
"step": 6290 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.543314218521118, |
|
"learning_rate": 1.6067796610169492e-05, |
|
"loss": 1.4362, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 3.3625097274780273, |
|
"learning_rate": 1.6061016949152545e-05, |
|
"loss": 1.5615, |
|
"step": 6310 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 4.757710933685303, |
|
"learning_rate": 1.6054237288135595e-05, |
|
"loss": 1.4382, |
|
"step": 6320 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.5102877616882324, |
|
"learning_rate": 1.6047457627118645e-05, |
|
"loss": 1.5367, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 3.226902484893799, |
|
"learning_rate": 1.6040677966101698e-05, |
|
"loss": 1.7053, |
|
"step": 6340 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.9312872886657715, |
|
"learning_rate": 1.6033898305084747e-05, |
|
"loss": 1.5362, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.0346354246139526, |
|
"learning_rate": 1.6027118644067797e-05, |
|
"loss": 1.6161, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.9462825059890747, |
|
"learning_rate": 1.602033898305085e-05, |
|
"loss": 1.4455, |
|
"step": 6370 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.9478490352630615, |
|
"learning_rate": 1.60135593220339e-05, |
|
"loss": 1.5183, |
|
"step": 6380 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.781726598739624, |
|
"learning_rate": 1.600677966101695e-05, |
|
"loss": 1.3517, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.242448091506958, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 1.6257, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.3541291952133179, |
|
"learning_rate": 1.5993220338983052e-05, |
|
"loss": 1.4108, |
|
"step": 6410 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.1203219890594482, |
|
"learning_rate": 1.5986440677966105e-05, |
|
"loss": 1.5363, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.9840617179870605, |
|
"learning_rate": 1.5979661016949152e-05, |
|
"loss": 1.4282, |
|
"step": 6430 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 3.6846158504486084, |
|
"learning_rate": 1.5972881355932205e-05, |
|
"loss": 1.4535, |
|
"step": 6440 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.272042155265808, |
|
"learning_rate": 1.5966101694915255e-05, |
|
"loss": 1.5842, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.9574079513549805, |
|
"learning_rate": 1.5959322033898304e-05, |
|
"loss": 1.3752, |
|
"step": 6460 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.928889751434326, |
|
"learning_rate": 1.5952542372881357e-05, |
|
"loss": 1.4675, |
|
"step": 6470 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.292194843292236, |
|
"learning_rate": 1.5945762711864407e-05, |
|
"loss": 1.5172, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 3.3911092281341553, |
|
"learning_rate": 1.593898305084746e-05, |
|
"loss": 1.467, |
|
"step": 6490 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 4.30944299697876, |
|
"learning_rate": 1.593220338983051e-05, |
|
"loss": 1.3515, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"eval_loss": 1.465691089630127, |
|
"eval_runtime": 66.1688, |
|
"eval_samples_per_second": 15.113, |
|
"eval_steps_per_second": 15.113, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.5098283290863037, |
|
"learning_rate": 1.592542372881356e-05, |
|
"loss": 1.5554, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 7.33607292175293, |
|
"learning_rate": 1.5918644067796613e-05, |
|
"loss": 1.6483, |
|
"step": 6520 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 3.0138282775878906, |
|
"learning_rate": 1.5911864406779662e-05, |
|
"loss": 1.5784, |
|
"step": 6530 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 3.527956008911133, |
|
"learning_rate": 1.5905084745762712e-05, |
|
"loss": 1.265, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 3.4589829444885254, |
|
"learning_rate": 1.5898305084745765e-05, |
|
"loss": 1.6605, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 5.758669853210449, |
|
"learning_rate": 1.5891525423728815e-05, |
|
"loss": 1.2722, |
|
"step": 6560 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 2.0331597328186035, |
|
"learning_rate": 1.5884745762711865e-05, |
|
"loss": 1.5016, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 7.730234622955322, |
|
"learning_rate": 1.5877966101694918e-05, |
|
"loss": 1.6401, |
|
"step": 6580 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.977071762084961, |
|
"learning_rate": 1.5871186440677967e-05, |
|
"loss": 1.3554, |
|
"step": 6590 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.8177216053009033, |
|
"learning_rate": 1.586440677966102e-05, |
|
"loss": 1.6109, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 4.232168197631836, |
|
"learning_rate": 1.585762711864407e-05, |
|
"loss": 1.3901, |
|
"step": 6610 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 6.004239082336426, |
|
"learning_rate": 1.585084745762712e-05, |
|
"loss": 1.648, |
|
"step": 6620 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 4.166013240814209, |
|
"learning_rate": 1.584406779661017e-05, |
|
"loss": 1.4462, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.389451503753662, |
|
"learning_rate": 1.583728813559322e-05, |
|
"loss": 1.5163, |
|
"step": 6640 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.042405843734741, |
|
"learning_rate": 1.5830508474576272e-05, |
|
"loss": 1.4931, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.3263092041015625, |
|
"learning_rate": 1.5823728813559322e-05, |
|
"loss": 1.4758, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.392455577850342, |
|
"learning_rate": 1.5816949152542372e-05, |
|
"loss": 1.6324, |
|
"step": 6670 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 4.789013385772705, |
|
"learning_rate": 1.5810169491525425e-05, |
|
"loss": 1.5744, |
|
"step": 6680 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.284268617630005, |
|
"learning_rate": 1.5803389830508475e-05, |
|
"loss": 1.6875, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.3472728729248047, |
|
"learning_rate": 1.5796610169491528e-05, |
|
"loss": 1.5855, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 4.676074981689453, |
|
"learning_rate": 1.5789830508474577e-05, |
|
"loss": 1.4166, |
|
"step": 6710 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.129818916320801, |
|
"learning_rate": 1.5783050847457627e-05, |
|
"loss": 1.3874, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 4.993739604949951, |
|
"learning_rate": 1.577627118644068e-05, |
|
"loss": 1.4709, |
|
"step": 6730 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.868453025817871, |
|
"learning_rate": 1.576949152542373e-05, |
|
"loss": 1.5804, |
|
"step": 6740 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.086580753326416, |
|
"learning_rate": 1.576271186440678e-05, |
|
"loss": 1.3879, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.881350994110107, |
|
"learning_rate": 1.5755932203389833e-05, |
|
"loss": 1.4923, |
|
"step": 6760 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.4320054054260254, |
|
"learning_rate": 1.5749152542372882e-05, |
|
"loss": 1.401, |
|
"step": 6770 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.2193899154663086, |
|
"learning_rate": 1.5742372881355936e-05, |
|
"loss": 1.5497, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.470163583755493, |
|
"learning_rate": 1.5735593220338985e-05, |
|
"loss": 1.4824, |
|
"step": 6790 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.5179107189178467, |
|
"learning_rate": 1.5728813559322035e-05, |
|
"loss": 1.5401, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 4.4173359870910645, |
|
"learning_rate": 1.5722033898305088e-05, |
|
"loss": 1.3075, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 4.17536735534668, |
|
"learning_rate": 1.5715254237288138e-05, |
|
"loss": 1.5135, |
|
"step": 6820 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.278881311416626, |
|
"learning_rate": 1.5708474576271187e-05, |
|
"loss": 1.5745, |
|
"step": 6830 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.718890905380249, |
|
"learning_rate": 1.5701694915254237e-05, |
|
"loss": 1.5397, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.8117082118988037, |
|
"learning_rate": 1.5694915254237287e-05, |
|
"loss": 1.5904, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.4601502418518066, |
|
"learning_rate": 1.568813559322034e-05, |
|
"loss": 1.5485, |
|
"step": 6860 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.0269432067871094, |
|
"learning_rate": 1.568135593220339e-05, |
|
"loss": 1.4487, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.047973394393921, |
|
"learning_rate": 1.5674576271186443e-05, |
|
"loss": 1.2999, |
|
"step": 6880 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.7541468143463135, |
|
"learning_rate": 1.5667796610169492e-05, |
|
"loss": 1.4651, |
|
"step": 6890 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.5456767082214355, |
|
"learning_rate": 1.5661016949152542e-05, |
|
"loss": 1.4433, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.4916276931762695, |
|
"learning_rate": 1.5654237288135595e-05, |
|
"loss": 1.5764, |
|
"step": 6910 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.6278750896453857, |
|
"learning_rate": 1.5647457627118645e-05, |
|
"loss": 1.3208, |
|
"step": 6920 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.9469902515411377, |
|
"learning_rate": 1.5640677966101695e-05, |
|
"loss": 1.6433, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 5.235908031463623, |
|
"learning_rate": 1.5633898305084748e-05, |
|
"loss": 1.2608, |
|
"step": 6940 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.5849263668060303, |
|
"learning_rate": 1.5627118644067798e-05, |
|
"loss": 1.5088, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.6014277935028076, |
|
"learning_rate": 1.562033898305085e-05, |
|
"loss": 1.3976, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.220320701599121, |
|
"learning_rate": 1.56135593220339e-05, |
|
"loss": 1.5011, |
|
"step": 6970 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 3.434382915496826, |
|
"learning_rate": 1.560677966101695e-05, |
|
"loss": 1.4733, |
|
"step": 6980 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 4.612408638000488, |
|
"learning_rate": 1.5600000000000003e-05, |
|
"loss": 1.4618, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.9301650524139404, |
|
"learning_rate": 1.5593220338983053e-05, |
|
"loss": 1.4165, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"eval_loss": 1.469406247138977, |
|
"eval_runtime": 66.2443, |
|
"eval_samples_per_second": 15.096, |
|
"eval_steps_per_second": 15.096, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.865874767303467, |
|
"learning_rate": 1.5586440677966103e-05, |
|
"loss": 1.3156, |
|
"step": 7010 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.411038875579834, |
|
"learning_rate": 1.5579661016949156e-05, |
|
"loss": 1.4303, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 2.869673728942871, |
|
"learning_rate": 1.5572881355932205e-05, |
|
"loss": 1.5055, |
|
"step": 7030 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.0629090070724487, |
|
"learning_rate": 1.5566101694915255e-05, |
|
"loss": 1.6094, |
|
"step": 7040 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 2.271601676940918, |
|
"learning_rate": 1.5559322033898305e-05, |
|
"loss": 1.4648, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.5404294729232788, |
|
"learning_rate": 1.5552542372881358e-05, |
|
"loss": 1.3793, |
|
"step": 7060 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.9599192142486572, |
|
"learning_rate": 1.5545762711864408e-05, |
|
"loss": 1.3563, |
|
"step": 7070 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 6.35668420791626, |
|
"learning_rate": 1.5538983050847457e-05, |
|
"loss": 1.4637, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 7.315426349639893, |
|
"learning_rate": 1.553220338983051e-05, |
|
"loss": 1.3284, |
|
"step": 7090 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.8330402374267578, |
|
"learning_rate": 1.552542372881356e-05, |
|
"loss": 1.3762, |
|
"step": 7100 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.072326183319092, |
|
"learning_rate": 1.551864406779661e-05, |
|
"loss": 1.4194, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.952779769897461, |
|
"learning_rate": 1.5511864406779663e-05, |
|
"loss": 1.4886, |
|
"step": 7120 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 4.332074165344238, |
|
"learning_rate": 1.5505084745762713e-05, |
|
"loss": 1.4912, |
|
"step": 7130 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.962344169616699, |
|
"learning_rate": 1.5498305084745762e-05, |
|
"loss": 1.5502, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 7.930359363555908, |
|
"learning_rate": 1.5491525423728815e-05, |
|
"loss": 1.471, |
|
"step": 7150 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 4.925573825836182, |
|
"learning_rate": 1.5484745762711865e-05, |
|
"loss": 1.3155, |
|
"step": 7160 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.2943711280822754, |
|
"learning_rate": 1.5477966101694918e-05, |
|
"loss": 1.3094, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.9433196783065796, |
|
"learning_rate": 1.5471186440677968e-05, |
|
"loss": 1.3453, |
|
"step": 7180 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.8384184837341309, |
|
"learning_rate": 1.5464406779661018e-05, |
|
"loss": 1.3788, |
|
"step": 7190 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 2.92781138420105, |
|
"learning_rate": 1.545762711864407e-05, |
|
"loss": 1.5704, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.0172346830368042, |
|
"learning_rate": 1.545084745762712e-05, |
|
"loss": 1.5442, |
|
"step": 7210 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.0915580987930298, |
|
"learning_rate": 1.544406779661017e-05, |
|
"loss": 1.5226, |
|
"step": 7220 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 9.170607566833496, |
|
"learning_rate": 1.5437288135593223e-05, |
|
"loss": 1.5066, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 4.84716796875, |
|
"learning_rate": 1.5430508474576273e-05, |
|
"loss": 1.4508, |
|
"step": 7240 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.949992656707764, |
|
"learning_rate": 1.5423728813559326e-05, |
|
"loss": 1.3393, |
|
"step": 7250 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.912881851196289, |
|
"learning_rate": 1.5416949152542372e-05, |
|
"loss": 1.5918, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.8128445148468018, |
|
"learning_rate": 1.5410169491525425e-05, |
|
"loss": 1.5794, |
|
"step": 7270 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 3.9023091793060303, |
|
"learning_rate": 1.5403389830508475e-05, |
|
"loss": 1.345, |
|
"step": 7280 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 2.263958692550659, |
|
"learning_rate": 1.5396610169491525e-05, |
|
"loss": 1.3651, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 8.103110313415527, |
|
"learning_rate": 1.5389830508474578e-05, |
|
"loss": 1.4784, |
|
"step": 7300 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 4.5859456062316895, |
|
"learning_rate": 1.5383050847457628e-05, |
|
"loss": 1.4087, |
|
"step": 7310 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 5.6322503089904785, |
|
"learning_rate": 1.5376271186440677e-05, |
|
"loss": 1.518, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 2.4485771656036377, |
|
"learning_rate": 1.536949152542373e-05, |
|
"loss": 1.3471, |
|
"step": 7330 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 2.0619120597839355, |
|
"learning_rate": 1.536271186440678e-05, |
|
"loss": 1.2563, |
|
"step": 7340 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 4.037415027618408, |
|
"learning_rate": 1.5355932203389833e-05, |
|
"loss": 1.4414, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 2.4679503440856934, |
|
"learning_rate": 1.5349152542372883e-05, |
|
"loss": 1.4021, |
|
"step": 7360 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.4774163961410522, |
|
"learning_rate": 1.5342372881355933e-05, |
|
"loss": 1.4331, |
|
"step": 7370 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 4.21893310546875, |
|
"learning_rate": 1.5335593220338986e-05, |
|
"loss": 1.4847, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 4.261248588562012, |
|
"learning_rate": 1.5328813559322035e-05, |
|
"loss": 1.4098, |
|
"step": 7390 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 2.419386625289917, |
|
"learning_rate": 1.5322033898305085e-05, |
|
"loss": 1.4635, |
|
"step": 7400 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 3.3119351863861084, |
|
"learning_rate": 1.5315254237288138e-05, |
|
"loss": 1.3862, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 4.350612163543701, |
|
"learning_rate": 1.5308474576271188e-05, |
|
"loss": 1.5094, |
|
"step": 7420 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 3.089937925338745, |
|
"learning_rate": 1.530169491525424e-05, |
|
"loss": 1.6249, |
|
"step": 7430 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 6.980156898498535, |
|
"learning_rate": 1.529491525423729e-05, |
|
"loss": 1.6102, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 3.881622552871704, |
|
"learning_rate": 1.528813559322034e-05, |
|
"loss": 1.5149, |
|
"step": 7450 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 2.8510727882385254, |
|
"learning_rate": 1.528135593220339e-05, |
|
"loss": 1.4542, |
|
"step": 7460 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 4.4826130867004395, |
|
"learning_rate": 1.527457627118644e-05, |
|
"loss": 1.5085, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 4.991846084594727, |
|
"learning_rate": 1.5267796610169493e-05, |
|
"loss": 1.4761, |
|
"step": 7480 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 4.136972904205322, |
|
"learning_rate": 1.5261016949152543e-05, |
|
"loss": 1.4851, |
|
"step": 7490 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 3.58878231048584, |
|
"learning_rate": 1.5254237288135594e-05, |
|
"loss": 1.4117, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"eval_loss": 1.437897801399231, |
|
"eval_runtime": 66.1821, |
|
"eval_samples_per_second": 15.11, |
|
"eval_steps_per_second": 15.11, |
|
"step": 7500 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 30000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 2500, |
|
"total_flos": 1.2076594495488e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|