|
{
  "best_metric": 0.8581712564304036,
  "best_model_checkpoint": "swin-tiny-patch4-window7-224-finetuned-eurosat/checkpoint-66",
  "epoch": 10.0,
  "eval_steps": 500,
  "global_step": 220,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.45454545454545453,
      "grad_norm": 6.432889938354492,
      "learning_rate": 2.272727272727273e-05,
      "loss": 0.6765,
      "step": 10
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 8.764992713928223,
      "learning_rate": 4.545454545454546e-05,
      "loss": 0.5793,
      "step": 20
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.6784565916398714,
      "eval_f1": 0.5484840645057965,
      "eval_loss": 0.5874345302581787,
      "eval_precision": 0.4603033467395912,
      "eval_recall": 0.6784565916398714,
      "eval_runtime": 293.1777,
      "eval_samples_per_second": 1.061,
      "eval_steps_per_second": 0.034,
      "step": 22
    },
    {
      "epoch": 1.3636363636363638,
      "grad_norm": 7.319944381713867,
      "learning_rate": 4.797979797979798e-05,
      "loss": 0.4587,
      "step": 30
    },
    {
      "epoch": 1.8181818181818183,
      "grad_norm": 11.48066520690918,
      "learning_rate": 4.545454545454546e-05,
      "loss": 0.3711,
      "step": 40
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.7781350482315113,
      "eval_f1": 0.7395292980112433,
      "eval_loss": 0.4134717881679535,
      "eval_precision": 0.8169282951542143,
      "eval_recall": 0.7781350482315113,
      "eval_runtime": 15.8701,
      "eval_samples_per_second": 19.597,
      "eval_steps_per_second": 0.63,
      "step": 44
    },
    {
      "epoch": 2.2727272727272725,
      "grad_norm": 36.77213668823242,
      "learning_rate": 4.292929292929293e-05,
      "loss": 0.3655,
      "step": 50
    },
    {
      "epoch": 2.7272727272727275,
      "grad_norm": 14.163016319274902,
      "learning_rate": 4.0404040404040405e-05,
      "loss": 0.2961,
      "step": 60
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.8553054662379421,
      "eval_f1": 0.8581712564304036,
      "eval_loss": 0.2815697491168976,
      "eval_precision": 0.8675973805921082,
      "eval_recall": 0.8553054662379421,
      "eval_runtime": 16.8336,
      "eval_samples_per_second": 18.475,
      "eval_steps_per_second": 0.594,
      "step": 66
    },
    {
      "epoch": 3.1818181818181817,
      "grad_norm": 10.475957870483398,
      "learning_rate": 3.787878787878788e-05,
      "loss": 0.2839,
      "step": 70
    },
    {
      "epoch": 3.6363636363636362,
      "grad_norm": 7.20914888381958,
      "learning_rate": 3.535353535353535e-05,
      "loss": 0.2576,
      "step": 80
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.7942122186495176,
      "eval_f1": 0.7857181245354792,
      "eval_loss": 0.2898705303668976,
      "eval_precision": 0.7883531432398468,
      "eval_recall": 0.7942122186495176,
      "eval_runtime": 16.0846,
      "eval_samples_per_second": 19.335,
      "eval_steps_per_second": 0.622,
      "step": 88
    },
    {
      "epoch": 4.090909090909091,
      "grad_norm": 8.099700927734375,
      "learning_rate": 3.282828282828283e-05,
      "loss": 0.2458,
      "step": 90
    },
    {
      "epoch": 4.545454545454545,
      "grad_norm": 16.755117416381836,
      "learning_rate": 3.0303030303030306e-05,
      "loss": 0.2473,
      "step": 100
    },
    {
      "epoch": 5.0,
      "grad_norm": 14.67806339263916,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.261,
      "step": 110
    },
    {
      "epoch": 5.0,
      "eval_accuracy": 0.8102893890675241,
      "eval_f1": 0.8036576399029299,
      "eval_loss": 0.24693283438682556,
      "eval_precision": 0.8056915007838585,
      "eval_recall": 0.8102893890675241,
      "eval_runtime": 16.5305,
      "eval_samples_per_second": 18.814,
      "eval_steps_per_second": 0.605,
      "step": 110
    },
    {
      "epoch": 5.454545454545454,
      "grad_norm": 19.368623733520508,
      "learning_rate": 2.5252525252525256e-05,
      "loss": 0.2877,
      "step": 120
    },
    {
      "epoch": 5.909090909090909,
      "grad_norm": 5.459014892578125,
      "learning_rate": 2.272727272727273e-05,
      "loss": 0.2559,
      "step": 130
    },
    {
      "epoch": 6.0,
      "eval_accuracy": 0.8360128617363344,
      "eval_f1": 0.8179357935850551,
      "eval_loss": 0.25484344363212585,
      "eval_precision": 0.8632228058178503,
      "eval_recall": 0.8360128617363344,
      "eval_runtime": 16.2848,
      "eval_samples_per_second": 19.098,
      "eval_steps_per_second": 0.614,
      "step": 132
    },
    {
      "epoch": 6.363636363636363,
      "grad_norm": 3.7228970527648926,
      "learning_rate": 2.0202020202020203e-05,
      "loss": 0.2382,
      "step": 140
    },
    {
      "epoch": 6.818181818181818,
      "grad_norm": 7.972034454345703,
      "learning_rate": 1.7676767676767676e-05,
      "loss": 0.2249,
      "step": 150
    },
    {
      "epoch": 7.0,
      "eval_accuracy": 0.8135048231511254,
      "eval_f1": 0.7881662757619652,
      "eval_loss": 0.2835286557674408,
      "eval_precision": 0.8478531476110869,
      "eval_recall": 0.8135048231511254,
      "eval_runtime": 17.1131,
      "eval_samples_per_second": 18.173,
      "eval_steps_per_second": 0.584,
      "step": 154
    },
    {
      "epoch": 7.2727272727272725,
      "grad_norm": 4.717079162597656,
      "learning_rate": 1.5151515151515153e-05,
      "loss": 0.2424,
      "step": 160
    },
    {
      "epoch": 7.7272727272727275,
      "grad_norm": 3.11995267868042,
      "learning_rate": 1.2626262626262628e-05,
      "loss": 0.2242,
      "step": 170
    },
    {
      "epoch": 8.0,
      "eval_accuracy": 0.8295819935691319,
      "eval_f1": 0.8262048489829058,
      "eval_loss": 0.23345188796520233,
      "eval_precision": 0.8261307807855373,
      "eval_recall": 0.8295819935691319,
      "eval_runtime": 17.7042,
      "eval_samples_per_second": 17.566,
      "eval_steps_per_second": 0.565,
      "step": 176
    },
    {
      "epoch": 8.181818181818182,
      "grad_norm": 8.103911399841309,
      "learning_rate": 1.0101010101010101e-05,
      "loss": 0.229,
      "step": 180
    },
    {
      "epoch": 8.636363636363637,
      "grad_norm": 5.379126071929932,
      "learning_rate": 7.5757575757575764e-06,
      "loss": 0.2215,
      "step": 190
    },
    {
      "epoch": 9.0,
      "eval_accuracy": 0.8520900321543409,
      "eval_f1": 0.8531774023903234,
      "eval_loss": 0.22933121025562286,
      "eval_precision": 0.8549399751110901,
      "eval_recall": 0.8520900321543409,
      "eval_runtime": 16.8963,
      "eval_samples_per_second": 18.406,
      "eval_steps_per_second": 0.592,
      "step": 198
    },
    {
      "epoch": 9.090909090909092,
      "grad_norm": 18.202444076538086,
      "learning_rate": 5.050505050505051e-06,
      "loss": 0.2223,
      "step": 200
    },
    {
      "epoch": 9.545454545454545,
      "grad_norm": 2.7657723426818848,
      "learning_rate": 2.5252525252525253e-06,
      "loss": 0.2159,
      "step": 210
    },
    {
      "epoch": 10.0,
      "grad_norm": 8.540946006774902,
      "learning_rate": 0.0,
      "loss": 0.2269,
      "step": 220
    },
    {
      "epoch": 10.0,
      "eval_accuracy": 0.842443729903537,
      "eval_f1": 0.8393214641540072,
      "eval_loss": 0.2212739735841751,
      "eval_precision": 0.8395802314162165,
      "eval_recall": 0.842443729903537,
      "eval_runtime": 16.6465,
      "eval_samples_per_second": 18.683,
      "eval_steps_per_second": 0.601,
      "step": 220
    },
    {
      "epoch": 10.0,
      "step": 220,
      "total_flos": 6.954705718242509e+17,
      "train_loss": 0.3014452880079096,
      "train_runtime": 4522.4246,
      "train_samples_per_second": 6.187,
      "train_steps_per_second": 0.049
    }
  ],
  "logging_steps": 10,
  "max_steps": 220,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 6.954705718242509e+17,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}
|
|