MIKA_SafeAeroBERT / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.187168922895821,
"global_step": 13005,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.02,
"learning_rate": 9.986209216279854e-06,
"loss": 1.5736,
"step": 100
},
{
"epoch": 0.03,
"learning_rate": 9.974100235452405e-06,
"loss": 2.2457,
"step": 200
},
{
"epoch": 0.05,
"learning_rate": 9.961823074335689e-06,
"loss": 1.5859,
"step": 300
},
{
"epoch": 0.07,
"learning_rate": 9.948705011772622e-06,
"loss": 1.728,
"step": 400
},
{
"epoch": 0.08,
"learning_rate": 9.938109653548606e-06,
"loss": 1.7661,
"step": 500
},
{
"epoch": 0.1,
"learning_rate": 9.927009754456779e-06,
"loss": 2.3678209948440125e+23,
"step": 600
},
{
"epoch": 0.12,
"learning_rate": 9.910191725529768e-06,
"loss": 0.0,
"step": 700
},
{
"epoch": 0.13,
"learning_rate": 9.893373696602759e-06,
"loss": 0.0,
"step": 800
},
{
"epoch": 0.15,
"learning_rate": 9.876555667675748e-06,
"loss": 0.0,
"step": 900
},
{
"epoch": 0.17,
"learning_rate": 9.85973763874874e-06,
"loss": 0.0,
"step": 1000
},
{
"epoch": 0.18,
"learning_rate": 9.84291960982173e-06,
"loss": 0.0,
"step": 1100
},
{
"epoch": 0.2,
"learning_rate": 9.826101580894721e-06,
"loss": 0.0,
"step": 1200
},
{
"epoch": 0.22,
"learning_rate": 9.80928355196771e-06,
"loss": 0.0,
"step": 1300
},
{
"epoch": 0.24,
"learning_rate": 9.792465523040701e-06,
"loss": 0.0,
"step": 1400
},
{
"epoch": 0.25,
"learning_rate": 9.77564749411369e-06,
"loss": 0.0,
"step": 1500
},
{
"epoch": 0.27,
"learning_rate": 9.758829465186681e-06,
"loss": 0.0,
"step": 1600
},
{
"epoch": 0.29,
"learning_rate": 9.74201143625967e-06,
"loss": 0.0,
"step": 1700
},
{
"epoch": 0.3,
"learning_rate": 9.725193407332661e-06,
"loss": 0.0,
"step": 1800
},
{
"epoch": 0.32,
"learning_rate": 9.70837537840565e-06,
"loss": 0.0,
"step": 1900
},
{
"epoch": 0.34,
"learning_rate": 9.691557349478641e-06,
"loss": 0.0,
"step": 2000
},
{
"epoch": 0.35,
"learning_rate": 9.674739320551632e-06,
"loss": 0.0,
"step": 2100
},
{
"epoch": 0.37,
"learning_rate": 9.657921291624623e-06,
"loss": 0.0,
"step": 2200
},
{
"epoch": 0.39,
"learning_rate": 9.641103262697614e-06,
"loss": 0.0,
"step": 2300
},
{
"epoch": 0.4,
"learning_rate": 9.624285233770603e-06,
"loss": 0.0,
"step": 2400
},
{
"epoch": 0.42,
"learning_rate": 9.607467204843594e-06,
"loss": 0.0,
"step": 2500
},
{
"epoch": 0.44,
"learning_rate": 9.590649175916583e-06,
"loss": 0.0,
"step": 2600
},
{
"epoch": 0.45,
"learning_rate": 9.573831146989574e-06,
"loss": 0.0,
"step": 2700
},
{
"epoch": 0.47,
"learning_rate": 9.557013118062563e-06,
"loss": 0.0,
"step": 2800
},
{
"epoch": 0.49,
"learning_rate": 9.540195089135554e-06,
"loss": 0.0,
"step": 2900
},
{
"epoch": 0.5,
"learning_rate": 9.523377060208543e-06,
"loss": 0.0,
"step": 3000
},
{
"epoch": 0.52,
"learning_rate": 9.506559031281534e-06,
"loss": 0.0,
"step": 3100
},
{
"epoch": 0.54,
"learning_rate": 9.489741002354525e-06,
"loss": 0.0,
"step": 3200
},
{
"epoch": 0.55,
"learning_rate": 9.472922973427516e-06,
"loss": 0.0,
"step": 3300
},
{
"epoch": 0.57,
"learning_rate": 9.456104944500505e-06,
"loss": 0.0,
"step": 3400
},
{
"epoch": 0.59,
"learning_rate": 9.439286915573496e-06,
"loss": 0.0,
"step": 3500
},
{
"epoch": 0.61,
"learning_rate": 9.422468886646485e-06,
"loss": 0.0,
"step": 3600
},
{
"epoch": 0.62,
"learning_rate": 9.405650857719476e-06,
"loss": 0.0,
"step": 3700
},
{
"epoch": 0.64,
"learning_rate": 9.388832828792467e-06,
"loss": 0.0,
"step": 3800
},
{
"epoch": 0.66,
"learning_rate": 9.372014799865456e-06,
"loss": 0.0,
"step": 3900
},
{
"epoch": 0.67,
"learning_rate": 9.355196770938447e-06,
"loss": 0.0,
"step": 4000
},
{
"epoch": 0.69,
"learning_rate": 9.338378742011436e-06,
"loss": 0.0,
"step": 4100
},
{
"epoch": 0.71,
"learning_rate": 9.321560713084427e-06,
"loss": 0.0,
"step": 4200
},
{
"epoch": 0.72,
"learning_rate": 9.304742684157418e-06,
"loss": 0.0,
"step": 4300
},
{
"epoch": 0.74,
"learning_rate": 9.287924655230409e-06,
"loss": 0.0,
"step": 4400
},
{
"epoch": 0.76,
"learning_rate": 9.271106626303398e-06,
"loss": 0.0,
"step": 4500
},
{
"epoch": 0.77,
"learning_rate": 9.254288597376389e-06,
"loss": 0.0,
"step": 4600
},
{
"epoch": 0.79,
"learning_rate": 9.237470568449378e-06,
"loss": 0.0,
"step": 4700
},
{
"epoch": 0.81,
"learning_rate": 9.220652539522369e-06,
"loss": 0.0,
"step": 4800
},
{
"epoch": 0.82,
"learning_rate": 9.203834510595358e-06,
"loss": 0.0,
"step": 4900
},
{
"epoch": 0.84,
"learning_rate": 9.187016481668349e-06,
"loss": 0.0,
"step": 5000
},
{
"epoch": 0.86,
"learning_rate": 9.170198452741338e-06,
"loss": 0.0,
"step": 5100
},
{
"epoch": 0.87,
"learning_rate": 9.153380423814329e-06,
"loss": 0.0,
"step": 5200
},
{
"epoch": 0.89,
"learning_rate": 9.13656239488732e-06,
"loss": 0.0,
"step": 5300
},
{
"epoch": 0.91,
"learning_rate": 9.119744365960311e-06,
"loss": 0.0,
"step": 5400
},
{
"epoch": 0.92,
"learning_rate": 9.102926337033302e-06,
"loss": 0.0,
"step": 5500
},
{
"epoch": 0.94,
"learning_rate": 9.086108308106291e-06,
"loss": 0.0,
"step": 5600
},
{
"epoch": 0.96,
"learning_rate": 9.069290279179282e-06,
"loss": 0.0,
"step": 5700
},
{
"epoch": 0.98,
"learning_rate": 9.052472250252271e-06,
"loss": 0.0,
"step": 5800
},
{
"epoch": 0.99,
"learning_rate": 9.035654221325262e-06,
"loss": 0.0,
"step": 5900
},
{
"epoch": 1.01,
"learning_rate": 9.018836192398251e-06,
"loss": 0.0,
"step": 6000
},
{
"epoch": 1.03,
"learning_rate": 9.002018163471242e-06,
"loss": 0.0,
"step": 6100
},
{
"epoch": 1.04,
"learning_rate": 8.985200134544231e-06,
"loss": 0.0,
"step": 6200
},
{
"epoch": 1.06,
"learning_rate": 8.968382105617222e-06,
"loss": 0.0,
"step": 6300
},
{
"epoch": 1.08,
"learning_rate": 8.951564076690213e-06,
"loss": 0.0,
"step": 6400
},
{
"epoch": 1.09,
"learning_rate": 8.934746047763204e-06,
"loss": 0.0,
"step": 6500
},
{
"epoch": 1.11,
"learning_rate": 8.917928018836193e-06,
"loss": 0.0,
"step": 6600
},
{
"epoch": 1.13,
"learning_rate": 8.901109989909184e-06,
"loss": 0.0,
"step": 6700
},
{
"epoch": 1.14,
"learning_rate": 8.884291960982173e-06,
"loss": 0.0,
"step": 6800
},
{
"epoch": 1.16,
"learning_rate": 8.867473932055164e-06,
"loss": 0.0,
"step": 6900
},
{
"epoch": 1.18,
"learning_rate": 8.850655903128153e-06,
"loss": 0.0,
"step": 7000
},
{
"epoch": 1.19,
"learning_rate": 8.833837874201144e-06,
"loss": 0.0,
"step": 7100
},
{
"epoch": 1.21,
"learning_rate": 8.817019845274135e-06,
"loss": 0.0,
"step": 7200
},
{
"epoch": 1.23,
"learning_rate": 8.800201816347124e-06,
"loss": 0.0,
"step": 7300
},
{
"epoch": 1.24,
"learning_rate": 8.783383787420115e-06,
"loss": 0.0,
"step": 7400
},
{
"epoch": 1.26,
"learning_rate": 8.766565758493106e-06,
"loss": 0.0,
"step": 7500
},
{
"epoch": 1.28,
"learning_rate": 8.749747729566097e-06,
"loss": 0.0,
"step": 7600
},
{
"epoch": 1.29,
"learning_rate": 8.732929700639086e-06,
"loss": 0.0,
"step": 7700
},
{
"epoch": 1.31,
"learning_rate": 8.716111671712077e-06,
"loss": 0.0,
"step": 7800
},
{
"epoch": 1.33,
"learning_rate": 8.699293642785066e-06,
"loss": 0.0,
"step": 7900
},
{
"epoch": 1.35,
"learning_rate": 8.682475613858057e-06,
"loss": 0.0,
"step": 8000
},
{
"epoch": 1.36,
"learning_rate": 8.665657584931046e-06,
"loss": 0.0,
"step": 8100
},
{
"epoch": 1.38,
"learning_rate": 8.648839556004037e-06,
"loss": 0.0,
"step": 8200
},
{
"epoch": 1.4,
"learning_rate": 8.632021527077026e-06,
"loss": 0.0,
"step": 8300
},
{
"epoch": 1.41,
"learning_rate": 8.615203498150017e-06,
"loss": 0.0,
"step": 8400
},
{
"epoch": 1.43,
"learning_rate": 8.598385469223008e-06,
"loss": 0.0,
"step": 8500
},
{
"epoch": 1.45,
"learning_rate": 8.581567440295999e-06,
"loss": 0.0,
"step": 8600
},
{
"epoch": 1.46,
"learning_rate": 8.564749411368988e-06,
"loss": 0.0,
"step": 8700
},
{
"epoch": 1.48,
"learning_rate": 8.547931382441979e-06,
"loss": 0.0,
"step": 8800
},
{
"epoch": 1.5,
"learning_rate": 8.53111335351497e-06,
"loss": 0.0,
"step": 8900
},
{
"epoch": 1.51,
"learning_rate": 8.514295324587959e-06,
"loss": 0.0,
"step": 9000
},
{
"epoch": 1.53,
"learning_rate": 8.49747729566095e-06,
"loss": 0.0,
"step": 9100
},
{
"epoch": 1.55,
"learning_rate": 8.480659266733939e-06,
"loss": 0.0,
"step": 9200
},
{
"epoch": 1.56,
"learning_rate": 8.46384123780693e-06,
"loss": 0.0,
"step": 9300
},
{
"epoch": 1.58,
"learning_rate": 8.447023208879919e-06,
"loss": 0.0,
"step": 9400
},
{
"epoch": 1.6,
"learning_rate": 8.43020517995291e-06,
"loss": 0.0,
"step": 9500
},
{
"epoch": 1.61,
"learning_rate": 8.413387151025901e-06,
"loss": 0.0,
"step": 9600
},
{
"epoch": 1.63,
"learning_rate": 8.396569122098892e-06,
"loss": 0.0,
"step": 9700
},
{
"epoch": 1.65,
"learning_rate": 8.379751093171881e-06,
"loss": 0.0,
"step": 9800
},
{
"epoch": 1.66,
"learning_rate": 8.362933064244872e-06,
"loss": 0.0,
"step": 9900
},
{
"epoch": 1.68,
"learning_rate": 8.346115035317861e-06,
"loss": 0.0,
"step": 10000
},
{
"epoch": 1.7,
"learning_rate": 8.329297006390852e-06,
"loss": 0.0,
"step": 10100
},
{
"epoch": 1.72,
"learning_rate": 8.312478977463841e-06,
"loss": 0.0,
"step": 10200
},
{
"epoch": 1.73,
"learning_rate": 8.295660948536832e-06,
"loss": 0.0,
"step": 10300
},
{
"epoch": 1.75,
"learning_rate": 8.278842919609821e-06,
"loss": 0.0,
"step": 10400
},
{
"epoch": 1.77,
"learning_rate": 8.262024890682812e-06,
"loss": 0.0,
"step": 10500
},
{
"epoch": 1.78,
"learning_rate": 8.245206861755803e-06,
"loss": 0.0,
"step": 10600
},
{
"epoch": 1.8,
"learning_rate": 8.228388832828794e-06,
"loss": 0.0,
"step": 10700
},
{
"epoch": 1.82,
"learning_rate": 8.211570803901785e-06,
"loss": 0.0,
"step": 10800
},
{
"epoch": 1.83,
"learning_rate": 8.194752774974774e-06,
"loss": 0.0,
"step": 10900
},
{
"epoch": 1.85,
"learning_rate": 8.177934746047765e-06,
"loss": 0.0,
"step": 11000
},
{
"epoch": 1.87,
"learning_rate": 8.161116717120754e-06,
"loss": 0.0,
"step": 11100
},
{
"epoch": 1.88,
"learning_rate": 8.144298688193745e-06,
"loss": 0.0,
"step": 11200
},
{
"epoch": 1.9,
"learning_rate": 8.127480659266734e-06,
"loss": 0.0,
"step": 11300
},
{
"epoch": 1.92,
"learning_rate": 8.110662630339725e-06,
"loss": 0.0,
"step": 11400
},
{
"epoch": 1.93,
"learning_rate": 8.093844601412714e-06,
"loss": 0.0,
"step": 11500
},
{
"epoch": 1.95,
"learning_rate": 8.077026572485705e-06,
"loss": 0.0,
"step": 11600
},
{
"epoch": 1.97,
"learning_rate": 8.060208543558696e-06,
"loss": 0.0,
"step": 11700
},
{
"epoch": 1.98,
"learning_rate": 8.043390514631687e-06,
"loss": 0.0,
"step": 11800
},
{
"epoch": 2.0,
"learning_rate": 8.026572485704676e-06,
"loss": 0.0,
"step": 11900
},
{
"epoch": 2.02,
"learning_rate": 8.009754456777667e-06,
"loss": 0.0,
"step": 12000
},
{
"epoch": 2.03,
"learning_rate": 7.992936427850656e-06,
"loss": 0.0,
"step": 12100
},
{
"epoch": 2.05,
"learning_rate": 7.976118398923647e-06,
"loss": 0.0,
"step": 12200
},
{
"epoch": 2.07,
"learning_rate": 7.959300369996638e-06,
"loss": 0.0,
"step": 12300
},
{
"epoch": 2.09,
"learning_rate": 7.942482341069627e-06,
"loss": 0.0,
"step": 12400
},
{
"epoch": 2.1,
"learning_rate": 7.925664312142618e-06,
"loss": 0.0,
"step": 12500
},
{
"epoch": 2.12,
"learning_rate": 7.908846283215607e-06,
"loss": 0.0,
"step": 12600
},
{
"epoch": 2.14,
"learning_rate": 7.892028254288598e-06,
"loss": 0.0,
"step": 12700
},
{
"epoch": 2.15,
"learning_rate": 7.875210225361589e-06,
"loss": 0.0,
"step": 12800
},
{
"epoch": 2.17,
"learning_rate": 7.85839219643458e-06,
"loss": 0.0,
"step": 12900
},
{
"epoch": 2.19,
"learning_rate": 7.841574167507569e-06,
"loss": 0.0,
"step": 13000
}
],
"max_steps": 59460,
"num_train_epochs": 10,
"total_flos": 1.314423778738176e+18,
"trial_name": null,
"trial_params": null
}
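
For reference, the state above can be inspected programmatically. The sketch below is a minimal example, assuming the file has been downloaded locally as trainer_state.json; the local path and the use of matplotlib for plotting are assumptions, not part of this repository.

```python
import json
import matplotlib.pyplot as plt

# Assumed local path to this file; adjust as needed.
with open("trainer_state.json") as f:
    state = json.load(f)

print(f"epoch reached:  {state['epoch']:.2f}")
print(f"global step:    {state['global_step']} / {state['max_steps']}")
print(f"planned epochs: {state['num_train_epochs']}")

# log_history holds one record per logging interval (every 100 steps here),
# each with the running training loss and the scheduler's learning rate.
steps = [e["step"] for e in state["log_history"]]
losses = [e["loss"] for e in state["log_history"]]
lrs = [e["learning_rate"] for e in state["log_history"]]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True)
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("global step")
plt.tight_layout()
plt.show()
```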