{
"best_metric": 0.6814723610877991,
"best_model_checkpoint": "./jako_mbartLarge_100p_run1/checkpoint-87009",
"epoch": 2.0,
"eval_steps": 500,
"global_step": 87009,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01,
"learning_rate": 4.97e-05,
"loss": 1.9891,
"step": 500
},
{
"epoch": 0.02,
"learning_rate": 4.996196669018189e-05,
"loss": 1.5354,
"step": 1000
},
{
"epoch": 0.03,
"learning_rate": 4.9923626660123305e-05,
"loss": 1.3833,
"step": 1500
},
{
"epoch": 0.05,
"learning_rate": 4.988528663006472e-05,
"loss": 1.2912,
"step": 2000
},
{
"epoch": 0.06,
"learning_rate": 4.984694660000614e-05,
"loss": 1.2355,
"step": 2500
},
{
"epoch": 0.07,
"learning_rate": 4.980860656994755e-05,
"loss": 1.1927,
"step": 3000
},
{
"epoch": 0.08,
"learning_rate": 4.977026653988897e-05,
"loss": 1.166,
"step": 3500
},
{
"epoch": 0.09,
"learning_rate": 4.9731926509830386e-05,
"loss": 1.1238,
"step": 4000
},
{
"epoch": 0.1,
"learning_rate": 4.9693586479771804e-05,
"loss": 1.0972,
"step": 4500
},
{
"epoch": 0.11,
"learning_rate": 4.9655246449713215e-05,
"loss": 1.0964,
"step": 5000
},
{
"epoch": 0.13,
"learning_rate": 4.961690641965463e-05,
"loss": 1.0611,
"step": 5500
},
{
"epoch": 0.14,
"learning_rate": 4.957871974971629e-05,
"loss": 1.0487,
"step": 6000
},
{
"epoch": 0.15,
"learning_rate": 4.9540379719657705e-05,
"loss": 1.0166,
"step": 6500
},
{
"epoch": 0.16,
"learning_rate": 4.9502039689599116e-05,
"loss": 1.0162,
"step": 7000
},
{
"epoch": 0.17,
"learning_rate": 4.946369965954053e-05,
"loss": 0.9952,
"step": 7500
},
{
"epoch": 0.18,
"learning_rate": 4.942535962948195e-05,
"loss": 0.9853,
"step": 8000
},
{
"epoch": 0.2,
"learning_rate": 4.9387096279483484e-05,
"loss": 0.9835,
"step": 8500
},
{
"epoch": 0.21,
"learning_rate": 4.9348832929485017e-05,
"loss": 0.9634,
"step": 9000
},
{
"epoch": 0.22,
"learning_rate": 4.9310492899426434e-05,
"loss": 0.9532,
"step": 9500
},
{
"epoch": 0.23,
"learning_rate": 4.927215286936785e-05,
"loss": 0.955,
"step": 10000
},
{
"epoch": 0.24,
"learning_rate": 4.923381283930927e-05,
"loss": 0.9392,
"step": 10500
},
{
"epoch": 0.25,
"learning_rate": 4.919547280925069e-05,
"loss": 0.9403,
"step": 11000
},
{
"epoch": 0.26,
"learning_rate": 4.91571327791921e-05,
"loss": 0.9177,
"step": 11500
},
{
"epoch": 0.28,
"learning_rate": 4.9118792749133516e-05,
"loss": 0.919,
"step": 12000
},
{
"epoch": 0.29,
"learning_rate": 4.908045271907493e-05,
"loss": 0.9036,
"step": 12500
},
{
"epoch": 0.3,
"learning_rate": 4.9042189369076466e-05,
"loss": 0.9019,
"step": 13000
},
{
"epoch": 0.31,
"learning_rate": 4.9003849339017884e-05,
"loss": 0.8969,
"step": 13500
},
{
"epoch": 0.32,
"learning_rate": 4.89655093089593e-05,
"loss": 0.891,
"step": 14000
},
{
"epoch": 0.33,
"learning_rate": 4.892716927890072e-05,
"loss": 0.8904,
"step": 14500
},
{
"epoch": 0.34,
"learning_rate": 4.888882924884213e-05,
"loss": 0.8833,
"step": 15000
},
{
"epoch": 0.36,
"learning_rate": 4.8850489218783554e-05,
"loss": 0.8668,
"step": 15500
},
{
"epoch": 0.37,
"learning_rate": 4.8812149188724965e-05,
"loss": 0.8692,
"step": 16000
},
{
"epoch": 0.38,
"learning_rate": 4.877380915866638e-05,
"loss": 0.8617,
"step": 16500
},
{
"epoch": 0.39,
"learning_rate": 4.8735469128607794e-05,
"loss": 0.866,
"step": 17000
},
{
"epoch": 0.4,
"learning_rate": 4.869720577860933e-05,
"loss": 0.8592,
"step": 17500
},
{
"epoch": 0.41,
"learning_rate": 4.8658865748550744e-05,
"loss": 0.8586,
"step": 18000
},
{
"epoch": 0.43,
"learning_rate": 4.862052571849217e-05,
"loss": 0.84,
"step": 18500
},
{
"epoch": 0.44,
"learning_rate": 4.8582262368493695e-05,
"loss": 0.84,
"step": 19000
},
{
"epoch": 0.45,
"learning_rate": 4.854392233843512e-05,
"loss": 0.8397,
"step": 19500
},
{
"epoch": 0.46,
"learning_rate": 4.850581234855688e-05,
"loss": 0.8338,
"step": 20000
},
{
"epoch": 0.47,
"learning_rate": 4.84674723184983e-05,
"loss": 0.838,
"step": 20500
},
{
"epoch": 0.48,
"learning_rate": 4.842913228843972e-05,
"loss": 0.8391,
"step": 21000
},
{
"epoch": 0.49,
"learning_rate": 4.8390792258381135e-05,
"loss": 0.8276,
"step": 21500
},
{
"epoch": 0.51,
"learning_rate": 4.8352452228322546e-05,
"loss": 0.8198,
"step": 22000
},
{
"epoch": 0.52,
"learning_rate": 4.8314112198263963e-05,
"loss": 0.8127,
"step": 22500
},
{
"epoch": 0.53,
"learning_rate": 4.827577216820538e-05,
"loss": 0.8184,
"step": 23000
},
{
"epoch": 0.54,
"learning_rate": 4.82374321381468e-05,
"loss": 0.817,
"step": 23500
},
{
"epoch": 0.55,
"learning_rate": 4.8199092108088216e-05,
"loss": 0.8196,
"step": 24000
},
{
"epoch": 0.56,
"learning_rate": 4.816075207802963e-05,
"loss": 0.8024,
"step": 24500
},
{
"epoch": 0.57,
"learning_rate": 4.812241204797105e-05,
"loss": 0.8133,
"step": 25000
},
{
"epoch": 0.59,
"learning_rate": 4.808407201791246e-05,
"loss": 0.8056,
"step": 25500
},
{
"epoch": 0.6,
"learning_rate": 4.804573198785388e-05,
"loss": 0.7972,
"step": 26000
},
{
"epoch": 0.61,
"learning_rate": 4.80073919577953e-05,
"loss": 0.8026,
"step": 26500
},
{
"epoch": 0.62,
"learning_rate": 4.7969051927736716e-05,
"loss": 0.7983,
"step": 27000
},
{
"epoch": 0.63,
"learning_rate": 4.793078857773824e-05,
"loss": 0.795,
"step": 27500
},
{
"epoch": 0.64,
"learning_rate": 4.7892448547679666e-05,
"loss": 0.803,
"step": 28000
},
{
"epoch": 0.66,
"learning_rate": 4.7854108517621084e-05,
"loss": 0.7911,
"step": 28500
},
{
"epoch": 0.67,
"learning_rate": 4.7815768487562494e-05,
"loss": 0.7757,
"step": 29000
},
{
"epoch": 0.68,
"learning_rate": 4.777742845750391e-05,
"loss": 0.777,
"step": 29500
},
{
"epoch": 0.69,
"learning_rate": 4.773908842744533e-05,
"loss": 0.7848,
"step": 30000
},
{
"epoch": 0.7,
"learning_rate": 4.770074839738675e-05,
"loss": 0.7834,
"step": 30500
},
{
"epoch": 0.71,
"learning_rate": 4.766240836732816e-05,
"loss": 0.7753,
"step": 31000
},
{
"epoch": 0.72,
"learning_rate": 4.7624068337269576e-05,
"loss": 0.7715,
"step": 31500
},
{
"epoch": 0.74,
"learning_rate": 4.7585728307211e-05,
"loss": 0.7714,
"step": 32000
},
{
"epoch": 0.75,
"learning_rate": 4.754738827715241e-05,
"loss": 0.7736,
"step": 32500
},
{
"epoch": 0.76,
"learning_rate": 4.750904824709383e-05,
"loss": 0.7767,
"step": 33000
},
{
"epoch": 0.77,
"learning_rate": 4.747070821703524e-05,
"loss": 0.7661,
"step": 33500
},
{
"epoch": 0.78,
"learning_rate": 4.7432368186976664e-05,
"loss": 0.7565,
"step": 34000
},
{
"epoch": 0.79,
"learning_rate": 4.739410483697819e-05,
"loss": 0.7515,
"step": 34500
},
{
"epoch": 0.8,
"learning_rate": 4.735584148697973e-05,
"loss": 0.7558,
"step": 35000
},
{
"epoch": 0.82,
"learning_rate": 4.731750145692114e-05,
"loss": 0.7642,
"step": 35500
},
{
"epoch": 0.83,
"learning_rate": 4.727916142686256e-05,
"loss": 0.7626,
"step": 36000
},
{
"epoch": 0.84,
"learning_rate": 4.7240821396803976e-05,
"loss": 0.7627,
"step": 36500
},
{
"epoch": 0.85,
"learning_rate": 4.7202481366745394e-05,
"loss": 0.7495,
"step": 37000
},
{
"epoch": 0.86,
"learning_rate": 4.716414133668681e-05,
"loss": 0.7582,
"step": 37500
},
{
"epoch": 0.87,
"learning_rate": 4.7125877986688344e-05,
"loss": 0.7481,
"step": 38000
},
{
"epoch": 0.88,
"learning_rate": 4.708753795662976e-05,
"loss": 0.7557,
"step": 38500
},
{
"epoch": 0.9,
"learning_rate": 4.704919792657117e-05,
"loss": 0.7384,
"step": 39000
},
{
"epoch": 0.91,
"learning_rate": 4.70108578965126e-05,
"loss": 0.742,
"step": 39500
},
{
"epoch": 0.92,
"learning_rate": 4.697251786645401e-05,
"loss": 0.7388,
"step": 40000
},
{
"epoch": 0.93,
"learning_rate": 4.6934177836395425e-05,
"loss": 0.7503,
"step": 40500
},
{
"epoch": 0.94,
"learning_rate": 4.689583780633684e-05,
"loss": 0.745,
"step": 41000
},
{
"epoch": 0.95,
"learning_rate": 4.685749777627826e-05,
"loss": 0.7228,
"step": 41500
},
{
"epoch": 0.97,
"learning_rate": 4.681915774621968e-05,
"loss": 0.7333,
"step": 42000
},
{
"epoch": 0.98,
"learning_rate": 4.678081771616109e-05,
"loss": 0.7371,
"step": 42500
},
{
"epoch": 0.99,
"learning_rate": 4.674247768610251e-05,
"loss": 0.733,
"step": 43000
},
{
"epoch": 1.0,
"learning_rate": 4.6704137656043925e-05,
"loss": 0.7241,
"step": 43500
},
{
"epoch": 1.0,
"eval_bleu": 56.112,
"eval_gen_len": 17.3165,
"eval_loss": 0.7177675366401672,
"eval_runtime": 7671.1849,
"eval_samples_per_second": 11.342,
"eval_steps_per_second": 1.418,
"step": 43504
},
{
"epoch": 1.01,
"learning_rate": 4.666587430604546e-05,
"loss": 0.7322,
"step": 44000
},
{
"epoch": 1.02,
"learning_rate": 4.6627534275986875e-05,
"loss": 0.7051,
"step": 44500
},
{
"epoch": 1.03,
"learning_rate": 4.658919424592829e-05,
"loss": 0.6935,
"step": 45000
},
{
"epoch": 1.05,
"learning_rate": 4.6550854215869704e-05,
"loss": 0.6831,
"step": 45500
},
{
"epoch": 1.06,
"learning_rate": 4.651259086587124e-05,
"loss": 0.6787,
"step": 46000
},
{
"epoch": 1.07,
"learning_rate": 4.6474327515872776e-05,
"loss": 0.6813,
"step": 46500
},
{
"epoch": 1.08,
"learning_rate": 4.6436140845934424e-05,
"loss": 0.6841,
"step": 47000
},
{
"epoch": 1.09,
"learning_rate": 4.639780081587584e-05,
"loss": 0.6697,
"step": 47500
},
{
"epoch": 1.1,
"learning_rate": 4.635946078581726e-05,
"loss": 0.6605,
"step": 48000
},
{
"epoch": 1.11,
"learning_rate": 4.632112075575868e-05,
"loss": 0.675,
"step": 48500
},
{
"epoch": 1.13,
"learning_rate": 4.6282780725700094e-05,
"loss": 0.6623,
"step": 49000
},
{
"epoch": 1.14,
"learning_rate": 4.6244440695641505e-05,
"loss": 0.6589,
"step": 49500
},
{
"epoch": 1.15,
"learning_rate": 4.620610066558292e-05,
"loss": 0.6476,
"step": 50000
},
{
"epoch": 1.16,
"learning_rate": 4.616776063552434e-05,
"loss": 0.6551,
"step": 50500
},
{
"epoch": 1.17,
"learning_rate": 4.612942060546576e-05,
"loss": 0.6502,
"step": 51000
},
{
"epoch": 1.18,
"learning_rate": 4.6091080575407176e-05,
"loss": 0.6478,
"step": 51500
},
{
"epoch": 1.2,
"learning_rate": 4.605274054534859e-05,
"loss": 0.6485,
"step": 52000
},
{
"epoch": 1.21,
"learning_rate": 4.6014400515290004e-05,
"loss": 0.6412,
"step": 52500
},
{
"epoch": 1.22,
"learning_rate": 4.597606048523142e-05,
"loss": 0.6376,
"step": 53000
},
{
"epoch": 1.23,
"learning_rate": 4.593772045517284e-05,
"loss": 0.6385,
"step": 53500
},
{
"epoch": 1.24,
"learning_rate": 4.589945710517437e-05,
"loss": 0.6366,
"step": 54000
},
{
"epoch": 1.25,
"learning_rate": 4.586111707511579e-05,
"loss": 0.6444,
"step": 54500
},
{
"epoch": 1.26,
"learning_rate": 4.582277704505721e-05,
"loss": 0.6311,
"step": 55000
},
{
"epoch": 1.28,
"learning_rate": 4.578451369505874e-05,
"loss": 0.6306,
"step": 55500
},
{
"epoch": 1.29,
"learning_rate": 4.574617366500016e-05,
"loss": 0.6199,
"step": 56000
},
{
"epoch": 1.3,
"learning_rate": 4.570783363494157e-05,
"loss": 0.6268,
"step": 56500
},
{
"epoch": 1.31,
"learning_rate": 4.566949360488299e-05,
"loss": 0.623,
"step": 57000
},
{
"epoch": 1.32,
"learning_rate": 4.5631153574824404e-05,
"loss": 0.6207,
"step": 57500
},
{
"epoch": 1.33,
"learning_rate": 4.559281354476582e-05,
"loss": 0.6204,
"step": 58000
},
{
"epoch": 1.34,
"learning_rate": 4.555447351470723e-05,
"loss": 0.6215,
"step": 58500
},
{
"epoch": 1.36,
"learning_rate": 4.551613348464866e-05,
"loss": 0.6114,
"step": 59000
},
{
"epoch": 1.37,
"learning_rate": 4.5477793454590075e-05,
"loss": 0.6134,
"step": 59500
},
{
"epoch": 1.38,
"learning_rate": 4.543953010459161e-05,
"loss": 0.6142,
"step": 60000
},
{
"epoch": 1.39,
"learning_rate": 4.5401190074533025e-05,
"loss": 0.6149,
"step": 60500
},
{
"epoch": 1.4,
"learning_rate": 4.5362850044474436e-05,
"loss": 0.6123,
"step": 61000
},
{
"epoch": 1.41,
"learning_rate": 4.5324510014415854e-05,
"loss": 0.6159,
"step": 61500
},
{
"epoch": 1.43,
"learning_rate": 4.5286246664417387e-05,
"loss": 0.6001,
"step": 62000
},
{
"epoch": 1.44,
"learning_rate": 4.5247906634358804e-05,
"loss": 0.6034,
"step": 62500
},
{
"epoch": 1.45,
"learning_rate": 4.520956660430022e-05,
"loss": 0.5999,
"step": 63000
},
{
"epoch": 1.46,
"learning_rate": 4.517122657424164e-05,
"loss": 0.6011,
"step": 63500
},
{
"epoch": 1.47,
"learning_rate": 4.513288654418305e-05,
"loss": 0.6051,
"step": 64000
},
{
"epoch": 1.48,
"learning_rate": 4.509454651412447e-05,
"loss": 0.6061,
"step": 64500
},
{
"epoch": 1.49,
"learning_rate": 4.5056206484065886e-05,
"loss": 0.598,
"step": 65000
},
{
"epoch": 1.51,
"learning_rate": 4.50178664540073e-05,
"loss": 0.5939,
"step": 65500
},
{
"epoch": 1.52,
"learning_rate": 4.497952642394872e-05,
"loss": 0.5905,
"step": 66000
},
{
"epoch": 1.53,
"learning_rate": 4.494118639389013e-05,
"loss": 0.5968,
"step": 66500
},
{
"epoch": 1.54,
"learning_rate": 4.490284636383155e-05,
"loss": 0.5943,
"step": 67000
},
{
"epoch": 1.55,
"learning_rate": 4.486450633377297e-05,
"loss": 0.6025,
"step": 67500
},
{
"epoch": 1.56,
"learning_rate": 4.4826166303714385e-05,
"loss": 0.5918,
"step": 68000
},
{
"epoch": 1.57,
"learning_rate": 4.47878262736558e-05,
"loss": 0.5983,
"step": 68500
},
{
"epoch": 1.59,
"learning_rate": 4.474948624359721e-05,
"loss": 0.5907,
"step": 69000
},
{
"epoch": 1.6,
"learning_rate": 4.471114621353864e-05,
"loss": 0.5852,
"step": 69500
},
{
"epoch": 1.61,
"learning_rate": 4.4672882863540164e-05,
"loss": 0.5886,
"step": 70000
},
{
"epoch": 1.62,
"learning_rate": 4.463454283348159e-05,
"loss": 0.5895,
"step": 70500
},
{
"epoch": 1.63,
"learning_rate": 4.4596202803423e-05,
"loss": 0.5887,
"step": 71000
},
{
"epoch": 1.64,
"learning_rate": 4.455786277336442e-05,
"loss": 0.5953,
"step": 71500
},
{
"epoch": 1.66,
"learning_rate": 4.451952274330583e-05,
"loss": 0.5888,
"step": 72000
},
{
"epoch": 1.67,
"learning_rate": 4.448118271324725e-05,
"loss": 0.571,
"step": 72500
},
{
"epoch": 1.68,
"learning_rate": 4.444291936324878e-05,
"loss": 0.576,
"step": 73000
},
{
"epoch": 1.69,
"learning_rate": 4.44045793331902e-05,
"loss": 0.5828,
"step": 73500
},
{
"epoch": 1.7,
"learning_rate": 4.436631598319173e-05,
"loss": 0.5875,
"step": 74000
},
{
"epoch": 1.71,
"learning_rate": 4.432797595313315e-05,
"loss": 0.5773,
"step": 74500
},
{
"epoch": 1.72,
"learning_rate": 4.428963592307457e-05,
"loss": 0.5741,
"step": 75000
},
{
"epoch": 1.74,
"learning_rate": 4.425129589301598e-05,
"loss": 0.5721,
"step": 75500
},
{
"epoch": 1.75,
"learning_rate": 4.42129558629574e-05,
"loss": 0.5797,
"step": 76000
},
{
"epoch": 1.76,
"learning_rate": 4.417461583289882e-05,
"loss": 0.5787,
"step": 76500
},
{
"epoch": 1.77,
"learning_rate": 4.4136275802840234e-05,
"loss": 0.5731,
"step": 77000
},
{
"epoch": 1.78,
"learning_rate": 4.409801245284177e-05,
"loss": 0.5642,
"step": 77500
},
{
"epoch": 1.79,
"learning_rate": 4.4059672422783185e-05,
"loss": 0.5653,
"step": 78000
},
{
"epoch": 1.8,
"learning_rate": 4.4021332392724596e-05,
"loss": 0.5663,
"step": 78500
},
{
"epoch": 1.82,
"learning_rate": 4.398299236266601e-05,
"loss": 0.5762,
"step": 79000
},
{
"epoch": 1.83,
"learning_rate": 4.394465233260743e-05,
"loss": 0.5742,
"step": 79500
},
{
"epoch": 1.84,
"learning_rate": 4.390631230254885e-05,
"loss": 0.5739,
"step": 80000
},
{
"epoch": 1.85,
"learning_rate": 4.3867972272490266e-05,
"loss": 0.5686,
"step": 80500
},
{
"epoch": 1.86,
"learning_rate": 4.382963224243168e-05,
"loss": 0.5757,
"step": 81000
},
{
"epoch": 1.87,
"learning_rate": 4.3791292212373095e-05,
"loss": 0.5654,
"step": 81500
},
{
"epoch": 1.88,
"learning_rate": 4.375295218231451e-05,
"loss": 0.5697,
"step": 82000
},
{
"epoch": 1.9,
"learning_rate": 4.371461215225593e-05,
"loss": 0.5582,
"step": 82500
},
{
"epoch": 1.91,
"learning_rate": 4.367627212219735e-05,
"loss": 0.5603,
"step": 83000
},
{
"epoch": 1.92,
"learning_rate": 4.363793209213876e-05,
"loss": 0.5564,
"step": 83500
},
{
"epoch": 1.93,
"learning_rate": 4.359959206208018e-05,
"loss": 0.5682,
"step": 84000
},
{
"epoch": 1.94,
"learning_rate": 4.356132871208171e-05,
"loss": 0.566,
"step": 84500
},
{
"epoch": 1.95,
"learning_rate": 4.3522988682023133e-05,
"loss": 0.5485,
"step": 85000
},
{
"epoch": 1.97,
"learning_rate": 4.3484648651964544e-05,
"loss": 0.5558,
"step": 85500
},
{
"epoch": 1.98,
"learning_rate": 4.344630862190596e-05,
"loss": 0.5592,
"step": 86000
},
{
"epoch": 1.99,
"learning_rate": 4.340796859184737e-05,
"loss": 0.5611,
"step": 86500
},
{
"epoch": 2.0,
"learning_rate": 4.337001196208938e-05,
"loss": 0.5523,
"step": 87000
},
{
"epoch": 2.0,
"eval_bleu": 57.9768,
"eval_gen_len": 17.2796,
"eval_loss": 0.6814723610877991,
"eval_runtime": 7592.1301,
"eval_samples_per_second": 11.46,
"eval_steps_per_second": 1.433,
"step": 87009
}
],
"logging_steps": 500,
"max_steps": 652560,
"num_train_epochs": 15,
"save_steps": 500,
"total_flos": 3.016956704056148e+18,
"trial_name": null,
"trial_params": null
}