{
  "best_metric": 0.05274021625518799,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.08663634394628547,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00043318171973142733,
      "grad_norm": 3.9565112590789795,
      "learning_rate": 5e-06,
      "loss": 0.7524,
      "step": 1
    },
    {
      "epoch": 0.00043318171973142733,
      "eval_loss": 4.976785659790039,
      "eval_runtime": 204.1866,
      "eval_samples_per_second": 19.041,
      "eval_steps_per_second": 9.521,
      "step": 1
    },
    {
      "epoch": 0.0008663634394628547,
      "grad_norm": 7.407309532165527,
      "learning_rate": 1e-05,
      "loss": 1.3786,
      "step": 2
    },
    {
      "epoch": 0.001299545159194282,
      "grad_norm": 9.137940406799316,
      "learning_rate": 1.5e-05,
      "loss": 1.6639,
      "step": 3
    },
    {
      "epoch": 0.0017327268789257093,
      "grad_norm": 9.636576652526855,
      "learning_rate": 2e-05,
      "loss": 1.6429,
      "step": 4
    },
    {
      "epoch": 0.0021659085986571367,
      "grad_norm": 7.883628845214844,
      "learning_rate": 2.5e-05,
      "loss": 1.5626,
      "step": 5
    },
    {
      "epoch": 0.002599090318388564,
      "grad_norm": 6.31871223449707,
      "learning_rate": 3e-05,
      "loss": 1.5388,
      "step": 6
    },
    {
      "epoch": 0.0030322720381199915,
      "grad_norm": 4.32879638671875,
      "learning_rate": 3.5e-05,
      "loss": 1.4263,
      "step": 7
    },
    {
      "epoch": 0.0034654537578514186,
      "grad_norm": 3.1516454219818115,
      "learning_rate": 4e-05,
      "loss": 1.3707,
      "step": 8
    },
    {
      "epoch": 0.003898635477582846,
      "grad_norm": 3.7077221870422363,
      "learning_rate": 4.5e-05,
      "loss": 1.3346,
      "step": 9
    },
    {
      "epoch": 0.004331817197314273,
      "grad_norm": 3.2946524620056152,
      "learning_rate": 5e-05,
      "loss": 1.3252,
      "step": 10
    },
    {
      "epoch": 0.004764998917045701,
      "grad_norm": 3.5760467052459717,
      "learning_rate": 5.500000000000001e-05,
      "loss": 1.159,
      "step": 11
    },
    {
      "epoch": 0.005198180636777128,
      "grad_norm": 4.902008056640625,
      "learning_rate": 6e-05,
      "loss": 0.9312,
      "step": 12
    },
    {
      "epoch": 0.005631362356508555,
      "grad_norm": 6.108403205871582,
      "learning_rate": 6.500000000000001e-05,
      "loss": 0.8111,
      "step": 13
    },
    {
      "epoch": 0.006064544076239983,
      "grad_norm": 6.699007511138916,
      "learning_rate": 7e-05,
      "loss": 0.4968,
      "step": 14
    },
    {
      "epoch": 0.00649772579597141,
      "grad_norm": 6.076148986816406,
      "learning_rate": 7.500000000000001e-05,
      "loss": 0.6053,
      "step": 15
    },
    {
      "epoch": 0.006930907515702837,
      "grad_norm": 4.3749098777771,
      "learning_rate": 8e-05,
      "loss": 0.496,
      "step": 16
    },
    {
      "epoch": 0.007364089235434265,
      "grad_norm": 4.691214561462402,
      "learning_rate": 8.5e-05,
      "loss": 0.3746,
      "step": 17
    },
    {
      "epoch": 0.007797270955165692,
      "grad_norm": 1.961999535560608,
      "learning_rate": 9e-05,
      "loss": 0.2746,
      "step": 18
    },
    {
      "epoch": 0.00823045267489712,
      "grad_norm": 2.699904203414917,
      "learning_rate": 9.5e-05,
      "loss": 0.308,
      "step": 19
    },
    {
      "epoch": 0.008663634394628547,
      "grad_norm": 2.5429675579071045,
      "learning_rate": 0.0001,
      "loss": 0.2816,
      "step": 20
    },
    {
      "epoch": 0.009096816114359974,
      "grad_norm": 5.605649471282959,
      "learning_rate": 9.999238475781957e-05,
      "loss": 0.2493,
      "step": 21
    },
    {
      "epoch": 0.009529997834091402,
      "grad_norm": 2.444078207015991,
      "learning_rate": 9.99695413509548e-05,
      "loss": 0.2686,
      "step": 22
    },
    {
      "epoch": 0.009963179553822829,
      "grad_norm": 2.162829637527466,
      "learning_rate": 9.99314767377287e-05,
      "loss": 0.1468,
      "step": 23
    },
    {
      "epoch": 0.010396361273554255,
      "grad_norm": 1.8878003358840942,
      "learning_rate": 9.987820251299122e-05,
      "loss": 0.1481,
      "step": 24
    },
    {
      "epoch": 0.010829542993285684,
      "grad_norm": 1.073910117149353,
      "learning_rate": 9.980973490458728e-05,
      "loss": 0.1747,
      "step": 25
    },
    {
      "epoch": 0.01126272471301711,
      "grad_norm": 4.712179660797119,
      "learning_rate": 9.972609476841367e-05,
      "loss": 0.1783,
      "step": 26
    },
    {
      "epoch": 0.011695906432748537,
      "grad_norm": 3.8842508792877197,
      "learning_rate": 9.962730758206611e-05,
      "loss": 0.1553,
      "step": 27
    },
    {
      "epoch": 0.012129088152479966,
      "grad_norm": 1.4134128093719482,
      "learning_rate": 9.951340343707852e-05,
      "loss": 0.1073,
      "step": 28
    },
    {
      "epoch": 0.012562269872211393,
      "grad_norm": 1.8056902885437012,
      "learning_rate": 9.938441702975689e-05,
      "loss": 0.1946,
      "step": 29
    },
    {
      "epoch": 0.01299545159194282,
      "grad_norm": 1.0871001482009888,
      "learning_rate": 9.924038765061042e-05,
      "loss": 0.0863,
      "step": 30
    },
    {
      "epoch": 0.013428633311674248,
      "grad_norm": 1.1687226295471191,
      "learning_rate": 9.908135917238321e-05,
      "loss": 0.0939,
      "step": 31
    },
    {
      "epoch": 0.013861815031405674,
      "grad_norm": 0.7801273465156555,
      "learning_rate": 9.890738003669029e-05,
      "loss": 0.067,
      "step": 32
    },
    {
      "epoch": 0.014294996751137101,
      "grad_norm": 1.7727514505386353,
      "learning_rate": 9.871850323926177e-05,
      "loss": 0.1051,
      "step": 33
    },
    {
      "epoch": 0.01472817847086853,
      "grad_norm": 1.272719144821167,
      "learning_rate": 9.851478631379982e-05,
      "loss": 0.0542,
      "step": 34
    },
    {
      "epoch": 0.015161360190599956,
      "grad_norm": 0.9942130446434021,
      "learning_rate": 9.829629131445342e-05,
      "loss": 0.0976,
      "step": 35
    },
    {
      "epoch": 0.015594541910331383,
      "grad_norm": 1.0211302042007446,
      "learning_rate": 9.806308479691595e-05,
      "loss": 0.051,
      "step": 36
    },
    {
      "epoch": 0.01602772363006281,
      "grad_norm": 0.9490691423416138,
      "learning_rate": 9.781523779815179e-05,
      "loss": 0.0635,
      "step": 37
    },
    {
      "epoch": 0.01646090534979424,
      "grad_norm": 0.638464629650116,
      "learning_rate": 9.755282581475769e-05,
      "loss": 0.0638,
      "step": 38
    },
    {
      "epoch": 0.016894087069525665,
      "grad_norm": 0.5109328031539917,
      "learning_rate": 9.727592877996585e-05,
      "loss": 0.0233,
      "step": 39
    },
    {
      "epoch": 0.017327268789257094,
      "grad_norm": 0.8561784029006958,
      "learning_rate": 9.698463103929542e-05,
      "loss": 0.0479,
      "step": 40
    },
    {
      "epoch": 0.017760450508988522,
      "grad_norm": 1.437472939491272,
      "learning_rate": 9.667902132486009e-05,
      "loss": 0.0846,
      "step": 41
    },
    {
      "epoch": 0.018193632228719947,
      "grad_norm": 0.5984365344047546,
      "learning_rate": 9.635919272833938e-05,
      "loss": 0.0423,
      "step": 42
    },
    {
      "epoch": 0.018626813948451375,
      "grad_norm": 1.3377480506896973,
      "learning_rate": 9.602524267262203e-05,
      "loss": 0.0807,
      "step": 43
    },
    {
      "epoch": 0.019059995668182804,
      "grad_norm": 0.24663518369197845,
      "learning_rate": 9.567727288213005e-05,
      "loss": 0.0189,
      "step": 44
    },
    {
      "epoch": 0.01949317738791423,
      "grad_norm": 0.4021802544593811,
      "learning_rate": 9.53153893518325e-05,
      "loss": 0.0141,
      "step": 45
    },
    {
      "epoch": 0.019926359107645657,
      "grad_norm": 0.26074135303497314,
      "learning_rate": 9.493970231495835e-05,
      "loss": 0.0108,
      "step": 46
    },
    {
      "epoch": 0.020359540827377086,
      "grad_norm": 0.31161004304885864,
      "learning_rate": 9.45503262094184e-05,
      "loss": 0.0197,
      "step": 47
    },
    {
      "epoch": 0.02079272254710851,
      "grad_norm": 0.357147216796875,
      "learning_rate": 9.414737964294636e-05,
      "loss": 0.0062,
      "step": 48
    },
    {
      "epoch": 0.02122590426683994,
      "grad_norm": 0.22662493586540222,
      "learning_rate": 9.373098535696979e-05,
      "loss": 0.008,
      "step": 49
    },
    {
      "epoch": 0.021659085986571368,
      "grad_norm": 0.31289562582969666,
      "learning_rate": 9.330127018922194e-05,
      "loss": 0.008,
      "step": 50
    },
    {
      "epoch": 0.021659085986571368,
      "eval_loss": 0.2094399482011795,
      "eval_runtime": 204.5839,
      "eval_samples_per_second": 19.004,
      "eval_steps_per_second": 9.502,
      "step": 50
    },
    {
      "epoch": 0.022092267706302793,
      "grad_norm": 2.441636323928833,
      "learning_rate": 9.285836503510562e-05,
      "loss": 0.2969,
      "step": 51
    },
    {
      "epoch": 0.02252544942603422,
      "grad_norm": 3.2028415203094482,
      "learning_rate": 9.24024048078213e-05,
      "loss": 0.2594,
      "step": 52
    },
    {
      "epoch": 0.02295863114576565,
      "grad_norm": 1.429338812828064,
      "learning_rate": 9.193352839727121e-05,
      "loss": 0.21,
      "step": 53
    },
    {
      "epoch": 0.023391812865497075,
      "grad_norm": 0.7525247931480408,
      "learning_rate": 9.145187862775209e-05,
      "loss": 0.1651,
      "step": 54
    },
    {
      "epoch": 0.023824994585228503,
      "grad_norm": 2.9362690448760986,
      "learning_rate": 9.09576022144496e-05,
      "loss": 0.2147,
      "step": 55
    },
    {
      "epoch": 0.02425817630495993,
      "grad_norm": 2.608370780944824,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.2761,
      "step": 56
    },
    {
      "epoch": 0.024691358024691357,
      "grad_norm": 2.0152244567871094,
      "learning_rate": 8.993177550236464e-05,
      "loss": 0.1976,
      "step": 57
    },
    {
      "epoch": 0.025124539744422785,
      "grad_norm": 0.9376019835472107,
      "learning_rate": 8.940053768033609e-05,
      "loss": 0.1674,
      "step": 58
    },
    {
      "epoch": 0.025557721464154214,
      "grad_norm": 0.7567312121391296,
      "learning_rate": 8.885729807284856e-05,
      "loss": 0.1707,
      "step": 59
    },
    {
      "epoch": 0.02599090318388564,
      "grad_norm": 0.4066583812236786,
      "learning_rate": 8.83022221559489e-05,
      "loss": 0.1036,
      "step": 60
    },
    {
      "epoch": 0.026424084903617067,
      "grad_norm": 0.9996243119239807,
      "learning_rate": 8.773547901113862e-05,
      "loss": 0.1479,
      "step": 61
    },
    {
      "epoch": 0.026857266623348496,
      "grad_norm": 1.146002173423767,
      "learning_rate": 8.715724127386972e-05,
      "loss": 0.1592,
      "step": 62
    },
    {
      "epoch": 0.02729044834307992,
      "grad_norm": 0.6335452198982239,
      "learning_rate": 8.656768508095853e-05,
      "loss": 0.1147,
      "step": 63
    },
    {
      "epoch": 0.02772363006281135,
      "grad_norm": 0.46830275654792786,
      "learning_rate": 8.596699001693255e-05,
      "loss": 0.1149,
      "step": 64
    },
    {
      "epoch": 0.028156811782542777,
      "grad_norm": 0.5706634521484375,
      "learning_rate": 8.535533905932738e-05,
      "loss": 0.1181,
      "step": 65
    },
    {
      "epoch": 0.028589993502274202,
      "grad_norm": 0.4780918061733246,
      "learning_rate": 8.473291852294987e-05,
      "loss": 0.0775,
      "step": 66
    },
    {
      "epoch": 0.02902317522200563,
      "grad_norm": 0.45546865463256836,
      "learning_rate": 8.409991800312493e-05,
      "loss": 0.0855,
      "step": 67
    },
    {
      "epoch": 0.02945635694173706,
      "grad_norm": 0.8334485292434692,
      "learning_rate": 8.345653031794292e-05,
      "loss": 0.0958,
      "step": 68
    },
    {
      "epoch": 0.029889538661468484,
      "grad_norm": 0.6183558702468872,
      "learning_rate": 8.280295144952536e-05,
      "loss": 0.0958,
      "step": 69
    },
    {
      "epoch": 0.030322720381199913,
      "grad_norm": 0.7801946997642517,
      "learning_rate": 8.213938048432697e-05,
      "loss": 0.1081,
      "step": 70
    },
    {
      "epoch": 0.03075590210093134,
      "grad_norm": 0.5213829278945923,
      "learning_rate": 8.146601955249188e-05,
      "loss": 0.0898,
      "step": 71
    },
    {
      "epoch": 0.031189083820662766,
      "grad_norm": 0.6553680300712585,
      "learning_rate": 8.07830737662829e-05,
      "loss": 0.1158,
      "step": 72
    },
    {
      "epoch": 0.0316222655403942,
      "grad_norm": 0.6341337561607361,
      "learning_rate": 8.009075115760243e-05,
      "loss": 0.0805,
      "step": 73
    },
    {
      "epoch": 0.03205544726012562,
      "grad_norm": 0.47776395082473755,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.0671,
      "step": 74
    },
    {
      "epoch": 0.03248862897985705,
      "grad_norm": 0.6857931017875671,
      "learning_rate": 7.86788218175523e-05,
      "loss": 0.0757,
      "step": 75
    },
    {
      "epoch": 0.03292181069958848,
      "grad_norm": 0.7055604457855225,
      "learning_rate": 7.795964517353735e-05,
      "loss": 0.0995,
      "step": 76
    },
    {
      "epoch": 0.033354992419319905,
      "grad_norm": 0.8226217031478882,
      "learning_rate": 7.723195175075136e-05,
      "loss": 0.0734,
      "step": 77
    },
    {
      "epoch": 0.03378817413905133,
      "grad_norm": 0.6480997204780579,
      "learning_rate": 7.649596321166024e-05,
      "loss": 0.0708,
      "step": 78
    },
    {
      "epoch": 0.03422135585878276,
      "grad_norm": 0.5634680390357971,
      "learning_rate": 7.575190374550272e-05,
      "loss": 0.06,
      "step": 79
    },
    {
      "epoch": 0.03465453757851419,
      "grad_norm": 0.5001603364944458,
      "learning_rate": 7.500000000000001e-05,
      "loss": 0.0499,
      "step": 80
    },
    {
      "epoch": 0.03508771929824561,
      "grad_norm": 0.3587808609008789,
      "learning_rate": 7.424048101231686e-05,
      "loss": 0.0339,
      "step": 81
    },
    {
      "epoch": 0.035520901017977044,
      "grad_norm": 0.5878409147262573,
      "learning_rate": 7.347357813929454e-05,
      "loss": 0.0634,
      "step": 82
    },
    {
      "epoch": 0.03595408273770847,
      "grad_norm": 0.5627532005310059,
      "learning_rate": 7.269952498697734e-05,
      "loss": 0.0498,
      "step": 83
    },
    {
      "epoch": 0.036387264457439894,
      "grad_norm": 0.522148847579956,
      "learning_rate": 7.191855733945387e-05,
      "loss": 0.0239,
      "step": 84
    },
    {
      "epoch": 0.036820446177171326,
      "grad_norm": 0.6190194487571716,
      "learning_rate": 7.113091308703498e-05,
      "loss": 0.0461,
      "step": 85
    },
    {
      "epoch": 0.03725362789690275,
      "grad_norm": 0.5469635128974915,
      "learning_rate": 7.033683215379002e-05,
      "loss": 0.0413,
      "step": 86
    },
    {
      "epoch": 0.037686809616634176,
      "grad_norm": 0.3395079970359802,
      "learning_rate": 6.953655642446368e-05,
      "loss": 0.0338,
      "step": 87
    },
    {
      "epoch": 0.03811999133636561,
      "grad_norm": 0.8697502017021179,
      "learning_rate": 6.873032967079561e-05,
      "loss": 0.0772,
      "step": 88
    },
    {
      "epoch": 0.03855317305609703,
      "grad_norm": 0.6907822489738464,
      "learning_rate": 6.7918397477265e-05,
      "loss": 0.0291,
      "step": 89
    },
    {
      "epoch": 0.03898635477582846,
      "grad_norm": 0.47123125195503235,
      "learning_rate": 6.710100716628344e-05,
      "loss": 0.0268,
      "step": 90
    },
    {
      "epoch": 0.03941953649555989,
      "grad_norm": 0.2734449803829193,
      "learning_rate": 6.627840772285784e-05,
      "loss": 0.012,
      "step": 91
    },
    {
      "epoch": 0.039852718215291315,
      "grad_norm": 0.27939316630363464,
      "learning_rate": 6.545084971874738e-05,
      "loss": 0.0145,
      "step": 92
    },
    {
      "epoch": 0.04028589993502274,
      "grad_norm": 0.2553471028804779,
      "learning_rate": 6.461858523613684e-05,
      "loss": 0.0174,
      "step": 93
    },
    {
      "epoch": 0.04071908165475417,
      "grad_norm": 0.15355971455574036,
      "learning_rate": 6.378186779084995e-05,
      "loss": 0.006,
      "step": 94
    },
    {
      "epoch": 0.0411522633744856,
      "grad_norm": 0.2645378112792969,
      "learning_rate": 6.294095225512603e-05,
      "loss": 0.0155,
      "step": 95
    },
    {
      "epoch": 0.04158544509421702,
      "grad_norm": 0.36685898900032043,
      "learning_rate": 6.209609477998338e-05,
      "loss": 0.0247,
      "step": 96
    },
    {
      "epoch": 0.042018626813948454,
      "grad_norm": 0.24984949827194214,
      "learning_rate": 6.124755271719325e-05,
      "loss": 0.0127,
      "step": 97
    },
    {
      "epoch": 0.04245180853367988,
      "grad_norm": 0.42770883440971375,
      "learning_rate": 6.0395584540887963e-05,
      "loss": 0.0153,
      "step": 98
    },
    {
      "epoch": 0.042884990253411304,
      "grad_norm": 0.01884501427412033,
      "learning_rate": 5.9540449768827246e-05,
      "loss": 0.0004,
      "step": 99
    },
    {
      "epoch": 0.043318171973142736,
      "grad_norm": 0.1676054745912552,
      "learning_rate": 5.868240888334653e-05,
      "loss": 0.0012,
      "step": 100
    },
    {
      "epoch": 0.043318171973142736,
      "eval_loss": 0.08056939393281937,
      "eval_runtime": 205.2456,
      "eval_samples_per_second": 18.943,
      "eval_steps_per_second": 9.472,
      "step": 100
    },
    {
      "epoch": 0.04375135369287416,
      "grad_norm": 0.9752314686775208,
      "learning_rate": 5.782172325201155e-05,
      "loss": 0.1654,
      "step": 101
    },
    {
      "epoch": 0.044184535412605586,
      "grad_norm": 0.903446614742279,
      "learning_rate": 5.695865504800327e-05,
      "loss": 0.1832,
      "step": 102
    },
    {
      "epoch": 0.04461771713233702,
      "grad_norm": 0.6539014577865601,
      "learning_rate": 5.6093467170257374e-05,
      "loss": 0.1592,
      "step": 103
    },
    {
      "epoch": 0.04505089885206844,
      "grad_norm": 0.4455128014087677,
      "learning_rate": 5.522642316338268e-05,
      "loss": 0.1276,
      "step": 104
    },
    {
      "epoch": 0.04548408057179987,
      "grad_norm": 0.5171334743499756,
      "learning_rate": 5.435778713738292e-05,
      "loss": 0.1779,
      "step": 105
    },
    {
      "epoch": 0.0459172622915313,
      "grad_norm": 0.6053957343101501,
      "learning_rate": 5.348782368720626e-05,
      "loss": 0.1559,
      "step": 106
    },
    {
      "epoch": 0.046350444011262724,
      "grad_norm": 0.5147733092308044,
      "learning_rate": 5.26167978121472e-05,
      "loss": 0.1247,
      "step": 107
    },
    {
      "epoch": 0.04678362573099415,
      "grad_norm": 0.6188340783119202,
      "learning_rate": 5.174497483512506e-05,
      "loss": 0.1215,
      "step": 108
    },
    {
      "epoch": 0.04721680745072558,
      "grad_norm": 0.4769625663757324,
      "learning_rate": 5.0872620321864185e-05,
      "loss": 0.1144,
      "step": 109
    },
    {
      "epoch": 0.047649989170457006,
      "grad_norm": 0.5022192597389221,
      "learning_rate": 5e-05,
      "loss": 0.1214,
      "step": 110
    },
    {
      "epoch": 0.04808317089018843,
      "grad_norm": 0.3280904293060303,
      "learning_rate": 4.912737967813583e-05,
      "loss": 0.0796,
      "step": 111
    },
    {
      "epoch": 0.04851635260991986,
      "grad_norm": 0.5279192328453064,
      "learning_rate": 4.825502516487497e-05,
      "loss": 0.1151,
      "step": 112
    },
    {
      "epoch": 0.04894953432965129,
      "grad_norm": 0.47400593757629395,
      "learning_rate": 4.738320218785281e-05,
      "loss": 0.1186,
      "step": 113
    },
    {
      "epoch": 0.04938271604938271,
      "grad_norm": 0.6582249999046326,
      "learning_rate": 4.6512176312793736e-05,
      "loss": 0.0961,
      "step": 114
    },
    {
      "epoch": 0.049815897769114145,
      "grad_norm": 0.40480566024780273,
      "learning_rate": 4.564221286261709e-05,
      "loss": 0.0765,
      "step": 115
    },
    {
      "epoch": 0.05024907948884557,
      "grad_norm": 0.3763730823993683,
      "learning_rate": 4.477357683661734e-05,
      "loss": 0.0739,
      "step": 116
    },
    {
      "epoch": 0.050682261208576995,
      "grad_norm": 0.40720927715301514,
      "learning_rate": 4.390653282974264e-05,
      "loss": 0.0798,
      "step": 117
    },
    {
      "epoch": 0.05111544292830843,
      "grad_norm": 0.7141808271408081,
      "learning_rate": 4.3041344951996746e-05,
      "loss": 0.0874,
      "step": 118
    },
    {
      "epoch": 0.05154862464803985,
      "grad_norm": 0.5295721888542175,
      "learning_rate": 4.2178276747988446e-05,
      "loss": 0.0861,
      "step": 119
    },
    {
      "epoch": 0.05198180636777128,
      "grad_norm": 0.3550686240196228,
      "learning_rate": 4.131759111665349e-05,
      "loss": 0.0749,
      "step": 120
    },
    {
      "epoch": 0.05241498808750271,
      "grad_norm": 0.5598456859588623,
      "learning_rate": 4.045955023117276e-05,
      "loss": 0.139,
      "step": 121
    },
    {
      "epoch": 0.052848169807234134,
      "grad_norm": 0.3951553702354431,
      "learning_rate": 3.960441545911204e-05,
      "loss": 0.0669,
      "step": 122
    },
    {
      "epoch": 0.05328135152696556,
      "grad_norm": 0.5975047945976257,
      "learning_rate": 3.875244728280676e-05,
      "loss": 0.1032,
      "step": 123
    },
    {
      "epoch": 0.05371453324669699,
      "grad_norm": 0.49492087960243225,
      "learning_rate": 3.790390522001662e-05,
      "loss": 0.0682,
      "step": 124
    },
    {
      "epoch": 0.054147714966428416,
      "grad_norm": 0.540847897529602,
      "learning_rate": 3.705904774487396e-05,
      "loss": 0.0871,
      "step": 125
    },
    {
      "epoch": 0.05458089668615984,
      "grad_norm": 0.44237419962882996,
      "learning_rate": 3.6218132209150045e-05,
      "loss": 0.0571,
      "step": 126
    },
    {
      "epoch": 0.05501407840589127,
      "grad_norm": 0.5057970881462097,
      "learning_rate": 3.5381414763863166e-05,
      "loss": 0.0885,
      "step": 127
    },
    {
      "epoch": 0.0554472601256227,
      "grad_norm": 0.3728044629096985,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 0.0395,
      "step": 128
    },
    {
      "epoch": 0.05588044184535412,
      "grad_norm": 0.39620381593704224,
      "learning_rate": 3.372159227714218e-05,
      "loss": 0.048,
      "step": 129
    },
    {
      "epoch": 0.056313623565085555,
      "grad_norm": 0.5850706696510315,
      "learning_rate": 3.289899283371657e-05,
      "loss": 0.0673,
      "step": 130
    },
    {
      "epoch": 0.05674680528481698,
      "grad_norm": 0.4705985486507416,
      "learning_rate": 3.2081602522734986e-05,
      "loss": 0.0457,
      "step": 131
    },
    {
      "epoch": 0.057179987004548405,
      "grad_norm": 0.39884859323501587,
      "learning_rate": 3.12696703292044e-05,
      "loss": 0.0609,
      "step": 132
    },
    {
      "epoch": 0.05761316872427984,
      "grad_norm": 0.34650862216949463,
      "learning_rate": 3.046344357553632e-05,
      "loss": 0.0439,
      "step": 133
    },
    {
      "epoch": 0.05804635044401126,
      "grad_norm": 0.5266785621643066,
      "learning_rate": 2.9663167846209998e-05,
      "loss": 0.0665,
      "step": 134
    },
    {
      "epoch": 0.05847953216374269,
      "grad_norm": 0.23444664478302002,
      "learning_rate": 2.886908691296504e-05,
      "loss": 0.0308,
      "step": 135
    },
    {
      "epoch": 0.05891271388347412,
      "grad_norm": 0.25425028800964355,
      "learning_rate": 2.8081442660546125e-05,
      "loss": 0.0171,
      "step": 136
    },
    {
      "epoch": 0.059345895603205544,
      "grad_norm": 0.40159934759140015,
      "learning_rate": 2.7300475013022663e-05,
      "loss": 0.0369,
      "step": 137
    },
    {
      "epoch": 0.05977907732293697,
      "grad_norm": 0.32667964696884155,
      "learning_rate": 2.6526421860705473e-05,
      "loss": 0.0294,
      "step": 138
    },
    {
      "epoch": 0.0602122590426684,
      "grad_norm": 0.40988171100616455,
      "learning_rate": 2.575951898768315e-05,
      "loss": 0.039,
      "step": 139
    },
    {
      "epoch": 0.060645440762399826,
      "grad_norm": 0.23806576430797577,
      "learning_rate": 2.500000000000001e-05,
      "loss": 0.0233,
      "step": 140
    },
    {
      "epoch": 0.06107862248213125,
      "grad_norm": 0.8812161087989807,
      "learning_rate": 2.4248096254497288e-05,
      "loss": 0.043,
      "step": 141
    },
    {
      "epoch": 0.06151180420186268,
      "grad_norm": 0.38497522473335266,
      "learning_rate": 2.350403678833976e-05,
      "loss": 0.0168,
      "step": 142
    },
    {
      "epoch": 0.06194498592159411,
      "grad_norm": 0.2601085901260376,
      "learning_rate": 2.2768048249248648e-05,
      "loss": 0.0048,
      "step": 143
    },
    {
      "epoch": 0.06237816764132553,
      "grad_norm": 0.2766932249069214,
      "learning_rate": 2.2040354826462668e-05,
      "loss": 0.0226,
      "step": 144
    },
    {
      "epoch": 0.06281134936105696,
      "grad_norm": 0.09686120599508286,
      "learning_rate": 2.132117818244771e-05,
      "loss": 0.0024,
      "step": 145
    },
    {
      "epoch": 0.0632445310807884,
      "grad_norm": 0.08909226208925247,
      "learning_rate": 2.061073738537635e-05,
      "loss": 0.0031,
      "step": 146
    },
    {
      "epoch": 0.06367771280051981,
      "grad_norm": 0.2203822135925293,
      "learning_rate": 1.9909248842397584e-05,
      "loss": 0.0149,
      "step": 147
    },
    {
      "epoch": 0.06411089452025125,
      "grad_norm": 0.12339666485786438,
      "learning_rate": 1.9216926233717085e-05,
      "loss": 0.0041,
      "step": 148
    },
    {
      "epoch": 0.06454407623998268,
      "grad_norm": 0.2815093696117401,
      "learning_rate": 1.8533980447508137e-05,
      "loss": 0.0132,
      "step": 149
    },
    {
      "epoch": 0.0649772579597141,
      "grad_norm": 0.0794215053319931,
      "learning_rate": 1.7860619515673033e-05,
      "loss": 0.0021,
      "step": 150
    },
    {
      "epoch": 0.0649772579597141,
      "eval_loss": 0.05704093724489212,
      "eval_runtime": 204.8642,
      "eval_samples_per_second": 18.978,
      "eval_steps_per_second": 9.489,
      "step": 150
    },
    {
      "epoch": 0.06541043967944553,
      "grad_norm": 0.3460334539413452,
      "learning_rate": 1.7197048550474643e-05,
      "loss": 0.1264,
      "step": 151
    },
    {
      "epoch": 0.06584362139917696,
      "grad_norm": 0.30614346265792847,
      "learning_rate": 1.6543469682057106e-05,
      "loss": 0.0801,
      "step": 152
    },
    {
      "epoch": 0.06627680311890838,
      "grad_norm": 0.39302000403404236,
      "learning_rate": 1.5900081996875083e-05,
      "loss": 0.1032,
      "step": 153
    },
    {
      "epoch": 0.06670998483863981,
      "grad_norm": 0.45320767164230347,
      "learning_rate": 1.526708147705013e-05,
      "loss": 0.1144,
      "step": 154
    },
    {
      "epoch": 0.06714316655837124,
      "grad_norm": 0.4174154996871948,
      "learning_rate": 1.4644660940672627e-05,
      "loss": 0.1109,
      "step": 155
    },
    {
      "epoch": 0.06757634827810266,
      "grad_norm": 0.3307155966758728,
      "learning_rate": 1.4033009983067452e-05,
      "loss": 0.09,
      "step": 156
    },
    {
      "epoch": 0.06800952999783409,
      "grad_norm": 0.35494112968444824,
      "learning_rate": 1.3432314919041478e-05,
      "loss": 0.0815,
      "step": 157
    },
    {
      "epoch": 0.06844271171756552,
      "grad_norm": 0.39967793226242065,
      "learning_rate": 1.2842758726130283e-05,
      "loss": 0.1042,
      "step": 158
    },
    {
      "epoch": 0.06887589343729694,
      "grad_norm": 0.5634881258010864,
      "learning_rate": 1.22645209888614e-05,
      "loss": 0.1295,
      "step": 159
    },
    {
      "epoch": 0.06930907515702837,
      "grad_norm": 0.371140718460083,
      "learning_rate": 1.1697777844051105e-05,
      "loss": 0.0926,
      "step": 160
    },
    {
      "epoch": 0.0697422568767598,
      "grad_norm": 0.35622888803482056,
      "learning_rate": 1.1142701927151456e-05,
      "loss": 0.0781,
      "step": 161
    },
    {
      "epoch": 0.07017543859649122,
      "grad_norm": 0.43054744601249695,
      "learning_rate": 1.0599462319663905e-05,
      "loss": 0.0843,
      "step": 162
    },
    {
      "epoch": 0.07060862031622266,
      "grad_norm": 0.3432992100715637,
      "learning_rate": 1.006822449763537e-05,
      "loss": 0.0529,
      "step": 163
    },
    {
      "epoch": 0.07104180203595409,
      "grad_norm": 0.2856929898262024,
      "learning_rate": 9.549150281252633e-06,
      "loss": 0.0602,
      "step": 164
    },
    {
      "epoch": 0.0714749837556855,
      "grad_norm": 0.3701120615005493,
      "learning_rate": 9.042397785550405e-06,
      "loss": 0.0895,
      "step": 165
    },
    {
      "epoch": 0.07190816547541694,
      "grad_norm": 0.37228187918663025,
      "learning_rate": 8.548121372247918e-06,
      "loss": 0.0793,
      "step": 166
    },
    {
      "epoch": 0.07234134719514837,
      "grad_norm": 0.3235107958316803,
      "learning_rate": 8.066471602728803e-06,
      "loss": 0.0589,
      "step": 167
    },
    {
      "epoch": 0.07277452891487979,
      "grad_norm": 0.4608737528324127,
      "learning_rate": 7.597595192178702e-06,
      "loss": 0.0802,
      "step": 168
    },
    {
      "epoch": 0.07320771063461122,
      "grad_norm": 0.3900294005870819,
      "learning_rate": 7.1416349648943894e-06,
      "loss": 0.0829,
      "step": 169
    },
    {
      "epoch": 0.07364089235434265,
      "grad_norm": 0.4117908775806427,
      "learning_rate": 6.698729810778065e-06,
      "loss": 0.0706,
      "step": 170
    },
    {
      "epoch": 0.07407407407407407,
      "grad_norm": 0.41260048747062683,
      "learning_rate": 6.269014643030213e-06,
      "loss": 0.0834,
      "step": 171
    },
    {
      "epoch": 0.0745072557938055,
      "grad_norm": 0.3703679144382477,
      "learning_rate": 5.852620357053651e-06,
      "loss": 0.056,
      "step": 172
    },
    {
      "epoch": 0.07494043751353693,
      "grad_norm": 0.382755309343338,
      "learning_rate": 5.449673790581611e-06,
      "loss": 0.0484,
      "step": 173
    },
    {
      "epoch": 0.07537361923326835,
      "grad_norm": 0.5890815854072571,
      "learning_rate": 5.060297685041659e-06,
      "loss": 0.1126,
      "step": 174
    },
    {
      "epoch": 0.07580680095299978,
      "grad_norm": 0.3182831406593323,
      "learning_rate": 4.684610648167503e-06,
      "loss": 0.0543,
      "step": 175
    },
    {
      "epoch": 0.07623998267273122,
      "grad_norm": 0.44664430618286133,
      "learning_rate": 4.322727117869951e-06,
      "loss": 0.0824,
      "step": 176
    },
    {
      "epoch": 0.07667316439246263,
      "grad_norm": 0.7139434814453125,
      "learning_rate": 3.974757327377981e-06,
      "loss": 0.0688,
      "step": 177
    },
    {
      "epoch": 0.07710634611219407,
      "grad_norm": 0.374163955450058,
      "learning_rate": 3.6408072716606346e-06,
      "loss": 0.0508,
      "step": 178
    },
    {
      "epoch": 0.0775395278319255,
      "grad_norm": 0.35698890686035156,
      "learning_rate": 3.3209786751399187e-06,
      "loss": 0.042,
      "step": 179
    },
    {
      "epoch": 0.07797270955165692,
      "grad_norm": 0.38510996103286743,
      "learning_rate": 3.0153689607045845e-06,
      "loss": 0.0394,
      "step": 180
    },
    {
      "epoch": 0.07840589127138835,
      "grad_norm": 0.5943405628204346,
      "learning_rate": 2.724071220034158e-06,
      "loss": 0.0716,
      "step": 181
    },
    {
      "epoch": 0.07883907299111978,
      "grad_norm": 0.5582049489021301,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 0.0563,
      "step": 182
    },
    {
      "epoch": 0.0792722547108512,
      "grad_norm": 0.2697021961212158,
      "learning_rate": 2.1847622018482283e-06,
      "loss": 0.0311,
      "step": 183
    },
    {
      "epoch": 0.07970543643058263,
      "grad_norm": 0.3521104156970978,
      "learning_rate": 1.9369152030840556e-06,
      "loss": 0.048,
      "step": 184
    },
    {
      "epoch": 0.08013861815031406,
      "grad_norm": 0.301751971244812,
      "learning_rate": 1.70370868554659e-06,
      "loss": 0.0218,
      "step": 185
    },
    {
      "epoch": 0.08057179987004548,
      "grad_norm": 0.4299044609069824,
      "learning_rate": 1.4852136862001764e-06,
      "loss": 0.0366,
      "step": 186
    },
    {
      "epoch": 0.08100498158977691,
      "grad_norm": 0.48658519983291626,
      "learning_rate": 1.2814967607382432e-06,
      "loss": 0.0139,
      "step": 187
    },
    {
      "epoch": 0.08143816330950834,
      "grad_norm": 0.08318212628364563,
      "learning_rate": 1.0926199633097157e-06,
      "loss": 0.003,
      "step": 188
    },
    {
      "epoch": 0.08187134502923976,
      "grad_norm": 0.34329327940940857,
      "learning_rate": 9.186408276168013e-07,
      "loss": 0.0281,
      "step": 189
    },
    {
      "epoch": 0.0823045267489712,
      "grad_norm": 0.23912468552589417,
      "learning_rate": 7.596123493895991e-07,
      "loss": 0.0172,
      "step": 190
    },
    {
      "epoch": 0.08273770846870263,
      "grad_norm": 0.19108746945858002,
      "learning_rate": 6.15582970243117e-07,
      "loss": 0.012,
      "step": 191
    },
    {
      "epoch": 0.08317089018843404,
      "grad_norm": 0.673153281211853,
      "learning_rate": 4.865965629214819e-07,
      "loss": 0.0185,
      "step": 192
    },
    {
      "epoch": 0.08360407190816548,
      "grad_norm": 0.28818559646606445,
      "learning_rate": 3.7269241793390085e-07,
      "loss": 0.0166,
      "step": 193
    },
    {
      "epoch": 0.08403725362789691,
      "grad_norm": 0.23559705913066864,
      "learning_rate": 2.7390523158633554e-07,
      "loss": 0.0113,
      "step": 194
    },
    {
      "epoch": 0.08447043534762833,
      "grad_norm": 0.02800111472606659,
      "learning_rate": 1.9026509541272275e-07,
      "loss": 0.001,
      "step": 195
    },
    {
      "epoch": 0.08490361706735976,
      "grad_norm": 0.16618917882442474,
      "learning_rate": 1.2179748700879012e-07,
      "loss": 0.006,
      "step": 196
    },
    {
      "epoch": 0.08533679878709119,
      "grad_norm": 0.30197080969810486,
      "learning_rate": 6.852326227130834e-08,
      "loss": 0.0095,
      "step": 197
    },
    {
      "epoch": 0.08576998050682261,
      "grad_norm": 0.1305372416973114,
      "learning_rate": 3.04586490452119e-08,
      "loss": 0.0032,
      "step": 198
    },
    {
      "epoch": 0.08620316222655404,
      "grad_norm": 0.37086614966392517,
      "learning_rate": 7.615242180436522e-09,
      "loss": 0.0082,
      "step": 199
    },
    {
      "epoch": 0.08663634394628547,
      "grad_norm": 0.013217504136264324,
      "learning_rate": 0.0,
      "loss": 0.0003,
      "step": 200
    },
    {
      "epoch": 0.08663634394628547,
      "eval_loss": 0.05274021625518799,
      "eval_runtime": 204.9157,
      "eval_samples_per_second": 18.974,
      "eval_steps_per_second": 9.487,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.1410122874866893e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}