{ "best_metric": null, "best_model_checkpoint": null, "epoch": 30.0, "eval_steps": 50, "global_step": 3000, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.01, "grad_norm": null, "learning_rate": 1e-05, "loss": 7.6065, "step": 1 }, { "epoch": 0.02, "grad_norm": null, "learning_rate": 1e-05, "loss": 7.1849, "step": 2 }, { "epoch": 0.03, "grad_norm": null, "learning_rate": 1e-05, "loss": 7.8016, "step": 3 }, { "epoch": 0.04, "grad_norm": 127.46443176269531, "learning_rate": 9.996666666666669e-06, "loss": 7.1081, "step": 4 }, { "epoch": 0.05, "grad_norm": null, "learning_rate": 9.996666666666669e-06, "loss": 6.9617, "step": 5 }, { "epoch": 0.06, "grad_norm": 260.1764831542969, "learning_rate": 9.993333333333333e-06, "loss": 8.2903, "step": 6 }, { "epoch": 0.07, "grad_norm": null, "learning_rate": 9.993333333333333e-06, "loss": 6.6702, "step": 7 }, { "epoch": 0.08, "grad_norm": 363.6545715332031, "learning_rate": 9.990000000000001e-06, "loss": 7.608, "step": 8 }, { "epoch": 0.09, "grad_norm": 258.0396728515625, "learning_rate": 9.986666666666667e-06, "loss": 7.4575, "step": 9 }, { "epoch": 0.1, "grad_norm": 743.1029663085938, "learning_rate": 9.983333333333333e-06, "loss": 7.4406, "step": 10 }, { "epoch": 0.11, "grad_norm": 370.84686279296875, "learning_rate": 9.980000000000001e-06, "loss": 6.8073, "step": 11 }, { "epoch": 0.12, "grad_norm": 197.13894653320312, "learning_rate": 9.976666666666667e-06, "loss": 7.1695, "step": 12 }, { "epoch": 0.13, "grad_norm": 325.1319885253906, "learning_rate": 9.973333333333333e-06, "loss": 6.4841, "step": 13 }, { "epoch": 0.14, "grad_norm": 300.36572265625, "learning_rate": 9.970000000000001e-06, "loss": 6.288, "step": 14 }, { "epoch": 0.15, "grad_norm": 80.08112335205078, "learning_rate": 9.966666666666667e-06, "loss": 6.9934, "step": 15 }, { "epoch": 0.16, "grad_norm": 91.23287200927734, "learning_rate": 9.963333333333333e-06, "loss": 6.5881, 
"step": 16 }, { "epoch": 0.17, "grad_norm": null, "learning_rate": 9.963333333333333e-06, "loss": 7.0085, "step": 17 }, { "epoch": 0.18, "grad_norm": 383.0216979980469, "learning_rate": 9.960000000000001e-06, "loss": 6.7479, "step": 18 }, { "epoch": 0.19, "grad_norm": 480.4852294921875, "learning_rate": 9.956666666666667e-06, "loss": 6.7101, "step": 19 }, { "epoch": 0.2, "grad_norm": 227.6482696533203, "learning_rate": 9.953333333333333e-06, "loss": 7.4244, "step": 20 }, { "epoch": 0.21, "grad_norm": 397.3857421875, "learning_rate": 9.950000000000001e-06, "loss": 6.3977, "step": 21 }, { "epoch": 0.22, "grad_norm": 322.843994140625, "learning_rate": 9.946666666666667e-06, "loss": 7.421, "step": 22 }, { "epoch": 0.23, "grad_norm": 358.02386474609375, "learning_rate": 9.943333333333334e-06, "loss": 7.6155, "step": 23 }, { "epoch": 0.24, "grad_norm": 107.66828918457031, "learning_rate": 9.940000000000001e-06, "loss": 6.431, "step": 24 }, { "epoch": 0.25, "grad_norm": null, "learning_rate": 9.940000000000001e-06, "loss": 7.0895, "step": 25 }, { "epoch": 0.26, "grad_norm": 130.93038940429688, "learning_rate": 9.936666666666668e-06, "loss": 6.918, "step": 26 }, { "epoch": 0.27, "grad_norm": 122.16655731201172, "learning_rate": 9.933333333333334e-06, "loss": 6.8893, "step": 27 }, { "epoch": 0.28, "grad_norm": 104.57180786132812, "learning_rate": 9.930000000000001e-06, "loss": 6.5356, "step": 28 }, { "epoch": 0.29, "grad_norm": 302.2137145996094, "learning_rate": 9.926666666666668e-06, "loss": 7.4044, "step": 29 }, { "epoch": 0.3, "grad_norm": 321.1846008300781, "learning_rate": 9.923333333333334e-06, "loss": 6.6629, "step": 30 }, { "epoch": 0.31, "grad_norm": 154.63665771484375, "learning_rate": 9.920000000000002e-06, "loss": 7.3168, "step": 31 }, { "epoch": 0.32, "grad_norm": 831.975830078125, "learning_rate": 9.916666666666668e-06, "loss": 6.8174, "step": 32 }, { "epoch": 0.33, "grad_norm": 142.6387481689453, "learning_rate": 9.913333333333334e-06, "loss": 5.6135, 
"step": 33 }, { "epoch": 0.34, "grad_norm": 134.61170959472656, "learning_rate": 9.91e-06, "loss": 6.4516, "step": 34 }, { "epoch": 0.35, "grad_norm": 395.0069274902344, "learning_rate": 9.906666666666668e-06, "loss": 6.3609, "step": 35 }, { "epoch": 0.36, "grad_norm": 102.47793579101562, "learning_rate": 9.903333333333334e-06, "loss": 8.7642, "step": 36 }, { "epoch": 0.37, "grad_norm": 107.85749816894531, "learning_rate": 9.9e-06, "loss": 7.3716, "step": 37 }, { "epoch": 0.38, "grad_norm": 152.15777587890625, "learning_rate": 9.896666666666668e-06, "loss": 6.0266, "step": 38 }, { "epoch": 0.39, "grad_norm": 82.06571197509766, "learning_rate": 9.893333333333334e-06, "loss": 6.4774, "step": 39 }, { "epoch": 0.4, "grad_norm": null, "learning_rate": 9.893333333333334e-06, "loss": 6.4157, "step": 40 }, { "epoch": 0.41, "grad_norm": 71.82071685791016, "learning_rate": 9.89e-06, "loss": 6.0823, "step": 41 }, { "epoch": 0.42, "grad_norm": 168.14816284179688, "learning_rate": 9.886666666666668e-06, "loss": 6.3355, "step": 42 }, { "epoch": 0.43, "grad_norm": 167.52796936035156, "learning_rate": 9.883333333333334e-06, "loss": 6.4029, "step": 43 }, { "epoch": 0.44, "grad_norm": 150.2906494140625, "learning_rate": 9.88e-06, "loss": 6.2416, "step": 44 }, { "epoch": 0.45, "grad_norm": 61.973976135253906, "learning_rate": 9.876666666666668e-06, "loss": 6.2382, "step": 45 }, { "epoch": 0.46, "grad_norm": 194.672119140625, "learning_rate": 9.873333333333334e-06, "loss": 6.6222, "step": 46 }, { "epoch": 0.47, "grad_norm": 904.8408813476562, "learning_rate": 9.87e-06, "loss": 6.7116, "step": 47 }, { "epoch": 0.48, "grad_norm": 184.56289672851562, "learning_rate": 9.866666666666668e-06, "loss": 5.9648, "step": 48 }, { "epoch": 0.49, "grad_norm": 515.2801513671875, "learning_rate": 9.863333333333334e-06, "loss": 6.1182, "step": 49 }, { "epoch": 0.5, "grad_norm": 144.19070434570312, "learning_rate": 9.86e-06, "loss": 6.151, "step": 50 }, { "epoch": 0.5, "eval_loss": 
6.252912998199463, "eval_map": 0.0, "eval_map_50": 0.0001, "eval_map_75": 0.0, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0001, "eval_map_bead": -1.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0003, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0001, "eval_map_leg warmer": -1.0, "eval_map_medium": 0.0, "eval_map_neckline": 0.0, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.0001, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0, "eval_map_small": 0.0, "eval_map_sock": 0.0, "eval_map_tassel": -1.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0004, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0002, "eval_mar_10": 0.0025, "eval_mar_100": 0.0064, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0826, "eval_mar_100_bead": -1.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0571, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0612, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_leg 
warmer": -1.0, "eval_mar_100_neckline": 0.0, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.0157, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.0009, "eval_mar_100_sock": 0.0, "eval_mar_100_tassel": -1.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0467, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0148, "eval_mar_medium": 0.0039, "eval_mar_small": 0.0002, "eval_model_preparation_time": 0.0124, "eval_runtime": 17.7401, "eval_samples_per_second": 5.637, "eval_steps_per_second": 1.409, "step": 50 }, { "epoch": 0.51, "grad_norm": 312.8277893066406, "learning_rate": 9.856666666666668e-06, "loss": 6.821, "step": 51 }, { "epoch": 0.52, "grad_norm": 107.58759307861328, "learning_rate": 9.853333333333334e-06, "loss": 6.2479, "step": 52 }, { "epoch": 0.53, "grad_norm": 263.9598693847656, "learning_rate": 9.85e-06, "loss": 5.8982, "step": 53 }, { "epoch": 0.54, "grad_norm": 235.9255828857422, "learning_rate": 9.846666666666668e-06, "loss": 6.0331, "step": 54 }, { "epoch": 0.55, "grad_norm": 422.4007568359375, "learning_rate": 9.843333333333333e-06, "loss": 6.1369, "step": 55 }, { "epoch": 0.56, "grad_norm": 50.6862678527832, "learning_rate": 9.84e-06, "loss": 5.6995, "step": 56 }, { "epoch": 0.57, "grad_norm": 358.001220703125, "learning_rate": 9.836666666666668e-06, "loss": 6.1153, "step": 57 }, { "epoch": 0.58, "grad_norm": 261.4446105957031, "learning_rate": 9.833333333333333e-06, "loss": 6.1624, "step": 58 }, { "epoch": 0.59, "grad_norm": 789.8291625976562, "learning_rate": 9.83e-06, "loss": 5.9405, "step": 59 }, { "epoch": 0.6, "grad_norm": 1186.0316162109375, "learning_rate": 
9.826666666666667e-06, "loss": 6.3908, "step": 60 }, { "epoch": 0.61, "grad_norm": 194.93621826171875, "learning_rate": 9.823333333333333e-06, "loss": 6.3639, "step": 61 }, { "epoch": 0.62, "grad_norm": 220.74520874023438, "learning_rate": 9.820000000000001e-06, "loss": 6.0198, "step": 62 }, { "epoch": 0.63, "grad_norm": 458.5269470214844, "learning_rate": 9.816666666666667e-06, "loss": 5.7877, "step": 63 }, { "epoch": 0.64, "grad_norm": 205.4408721923828, "learning_rate": 9.813333333333333e-06, "loss": 5.9099, "step": 64 }, { "epoch": 0.65, "grad_norm": 179.69705200195312, "learning_rate": 9.810000000000001e-06, "loss": 6.0058, "step": 65 }, { "epoch": 0.66, "grad_norm": 85.47879791259766, "learning_rate": 9.806666666666667e-06, "loss": 6.0508, "step": 66 }, { "epoch": 0.67, "grad_norm": 147.2070770263672, "learning_rate": 9.803333333333333e-06, "loss": 6.1019, "step": 67 }, { "epoch": 0.68, "grad_norm": 220.77418518066406, "learning_rate": 9.800000000000001e-06, "loss": 6.293, "step": 68 }, { "epoch": 0.69, "grad_norm": 87.8651351928711, "learning_rate": 9.796666666666667e-06, "loss": 5.9891, "step": 69 }, { "epoch": 0.7, "grad_norm": 86.30547332763672, "learning_rate": 9.793333333333333e-06, "loss": 6.6983, "step": 70 }, { "epoch": 0.71, "grad_norm": 199.9834442138672, "learning_rate": 9.790000000000001e-06, "loss": 6.336, "step": 71 }, { "epoch": 0.72, "grad_norm": 94.88326263427734, "learning_rate": 9.786666666666667e-06, "loss": 5.7409, "step": 72 }, { "epoch": 0.73, "grad_norm": 350.45367431640625, "learning_rate": 9.783333333333335e-06, "loss": 5.6115, "step": 73 }, { "epoch": 0.74, "grad_norm": 407.3158264160156, "learning_rate": 9.780000000000001e-06, "loss": 6.1523, "step": 74 }, { "epoch": 0.75, "grad_norm": 136.42208862304688, "learning_rate": 9.776666666666667e-06, "loss": 5.8014, "step": 75 }, { "epoch": 0.76, "grad_norm": 350.19482421875, "learning_rate": 9.773333333333335e-06, "loss": 6.3259, "step": 76 }, { "epoch": 0.77, "grad_norm": 
180.6783905029297, "learning_rate": 9.770000000000001e-06, "loss": 5.5709, "step": 77 }, { "epoch": 0.78, "grad_norm": 116.72332763671875, "learning_rate": 9.766666666666667e-06, "loss": 5.6995, "step": 78 }, { "epoch": 0.79, "grad_norm": 114.10091400146484, "learning_rate": 9.763333333333335e-06, "loss": 5.7428, "step": 79 }, { "epoch": 0.8, "grad_norm": 179.9557342529297, "learning_rate": 9.760000000000001e-06, "loss": 6.1858, "step": 80 }, { "epoch": 0.81, "grad_norm": 233.33485412597656, "learning_rate": 9.756666666666668e-06, "loss": 5.4199, "step": 81 }, { "epoch": 0.82, "grad_norm": 284.7470703125, "learning_rate": 9.753333333333335e-06, "loss": 5.7896, "step": 82 }, { "epoch": 0.83, "grad_norm": 129.3317413330078, "learning_rate": 9.75e-06, "loss": 5.6516, "step": 83 }, { "epoch": 0.84, "grad_norm": 319.6513977050781, "learning_rate": 9.746666666666668e-06, "loss": 5.3085, "step": 84 }, { "epoch": 0.85, "grad_norm": 283.2619934082031, "learning_rate": 9.743333333333335e-06, "loss": 6.0758, "step": 85 }, { "epoch": 0.86, "grad_norm": 233.68325805664062, "learning_rate": 9.74e-06, "loss": 5.9875, "step": 86 }, { "epoch": 0.87, "grad_norm": 91.7517318725586, "learning_rate": 9.736666666666668e-06, "loss": 5.9359, "step": 87 }, { "epoch": 0.88, "grad_norm": 346.4425354003906, "learning_rate": 9.733333333333334e-06, "loss": 5.4443, "step": 88 }, { "epoch": 0.89, "grad_norm": 391.9820861816406, "learning_rate": 9.73e-06, "loss": 5.5552, "step": 89 }, { "epoch": 0.9, "grad_norm": 220.142822265625, "learning_rate": 9.726666666666668e-06, "loss": 5.641, "step": 90 }, { "epoch": 0.91, "grad_norm": 91.38670349121094, "learning_rate": 9.723333333333334e-06, "loss": 5.3614, "step": 91 }, { "epoch": 0.92, "grad_norm": 192.34584045410156, "learning_rate": 9.72e-06, "loss": 5.4784, "step": 92 }, { "epoch": 0.93, "grad_norm": 149.93240356445312, "learning_rate": 9.716666666666668e-06, "loss": 4.9825, "step": 93 }, { "epoch": 0.94, "grad_norm": 146.33152770996094, 
"learning_rate": 9.713333333333334e-06, "loss": 5.1366, "step": 94 }, { "epoch": 0.95, "grad_norm": 208.05772399902344, "learning_rate": 9.71e-06, "loss": 5.3561, "step": 95 }, { "epoch": 0.96, "grad_norm": 82.96245574951172, "learning_rate": 9.706666666666668e-06, "loss": 5.8718, "step": 96 }, { "epoch": 0.97, "grad_norm": 114.87480926513672, "learning_rate": 9.703333333333334e-06, "loss": 5.5127, "step": 97 }, { "epoch": 0.98, "grad_norm": 51.251712799072266, "learning_rate": 9.7e-06, "loss": 5.8586, "step": 98 }, { "epoch": 0.99, "grad_norm": 68.14344024658203, "learning_rate": 9.696666666666668e-06, "loss": 8.6367, "step": 99 }, { "epoch": 1.0, "grad_norm": 136.92770385742188, "learning_rate": 9.693333333333334e-06, "loss": 6.5354, "step": 100 }, { "epoch": 1.0, "eval_loss": 5.572784900665283, "eval_map": 0.0001, "eval_map_50": 0.0003, "eval_map_75": 0.0001, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_bead": -1.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0001, "eval_map_leg warmer": -1.0, "eval_map_medium": 0.0003, "eval_map_neckline": 0.0, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.0043, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0, "eval_map_small": 0.0001, "eval_map_sock": 0.0, "eval_map_tassel": -1.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0007, 
"eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0006, "eval_mar_10": 0.0021, "eval_mar_100": 0.004, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_bead": -1.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_leg warmer": -1.0, "eval_mar_100_neckline": 0.0, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.0955, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.0009, "eval_mar_100_sock": 0.0, "eval_mar_100_tassel": -1.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0667, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0037, "eval_mar_medium": 0.0064, "eval_mar_small": 0.0023, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.582, "eval_samples_per_second": 5.382, "eval_steps_per_second": 1.345, "step": 100 }, { "epoch": 1.01, "grad_norm": 122.69412994384766, "learning_rate": 9.69e-06, "loss": 5.1852, "step": 101 }, { "epoch": 1.02, "grad_norm": 132.8516082763672, "learning_rate": 9.686666666666668e-06, "loss": 4.965, "step": 102 }, { "epoch": 1.03, "grad_norm": 231.4666748046875, "learning_rate": 
9.683333333333334e-06, "loss": 5.3525, "step": 103 }, { "epoch": 1.04, "grad_norm": 63.403533935546875, "learning_rate": 9.68e-06, "loss": 5.543, "step": 104 }, { "epoch": 1.05, "grad_norm": 101.78760528564453, "learning_rate": 9.676666666666668e-06, "loss": 5.533, "step": 105 }, { "epoch": 1.06, "grad_norm": 1283.5047607421875, "learning_rate": 9.673333333333334e-06, "loss": 5.5533, "step": 106 }, { "epoch": 1.07, "grad_norm": 96.63442993164062, "learning_rate": 9.67e-06, "loss": 5.8626, "step": 107 }, { "epoch": 1.08, "grad_norm": 167.75921630859375, "learning_rate": 9.666666666666667e-06, "loss": 5.4616, "step": 108 }, { "epoch": 1.09, "grad_norm": 66.6249771118164, "learning_rate": 9.663333333333335e-06, "loss": 5.1235, "step": 109 }, { "epoch": 1.1, "grad_norm": 66.75780487060547, "learning_rate": 9.66e-06, "loss": 5.0813, "step": 110 }, { "epoch": 1.11, "grad_norm": 54.62303924560547, "learning_rate": 9.656666666666667e-06, "loss": 5.6448, "step": 111 }, { "epoch": 1.12, "grad_norm": 72.61183166503906, "learning_rate": 9.653333333333335e-06, "loss": 5.0278, "step": 112 }, { "epoch": 1.13, "grad_norm": 90.81854248046875, "learning_rate": 9.65e-06, "loss": 5.0956, "step": 113 }, { "epoch": 1.1400000000000001, "grad_norm": 154.87535095214844, "learning_rate": 9.646666666666667e-06, "loss": 5.2, "step": 114 }, { "epoch": 1.15, "grad_norm": 333.8710021972656, "learning_rate": 9.643333333333335e-06, "loss": 5.4318, "step": 115 }, { "epoch": 1.16, "grad_norm": 222.442138671875, "learning_rate": 9.640000000000001e-06, "loss": 5.7561, "step": 116 }, { "epoch": 1.17, "grad_norm": 633.295166015625, "learning_rate": 9.636666666666667e-06, "loss": 5.2854, "step": 117 }, { "epoch": 1.18, "grad_norm": 87.82575225830078, "learning_rate": 9.633333333333335e-06, "loss": 5.4189, "step": 118 }, { "epoch": 1.19, "grad_norm": 185.8287353515625, "learning_rate": 9.630000000000001e-06, "loss": 5.0217, "step": 119 }, { "epoch": 1.2, "grad_norm": 251.31787109375, "learning_rate": 
9.626666666666667e-06, "loss": 5.3129, "step": 120 }, { "epoch": 1.21, "grad_norm": 96.00883483886719, "learning_rate": 9.623333333333335e-06, "loss": 5.3272, "step": 121 }, { "epoch": 1.22, "grad_norm": 186.9779052734375, "learning_rate": 9.620000000000001e-06, "loss": 5.3058, "step": 122 }, { "epoch": 1.23, "grad_norm": 47.78180694580078, "learning_rate": 9.616666666666667e-06, "loss": 5.1369, "step": 123 }, { "epoch": 1.24, "grad_norm": 75.06971740722656, "learning_rate": 9.613333333333335e-06, "loss": 5.0564, "step": 124 }, { "epoch": 1.25, "grad_norm": 137.00453186035156, "learning_rate": 9.610000000000001e-06, "loss": 5.5225, "step": 125 }, { "epoch": 1.26, "grad_norm": 131.43067932128906, "learning_rate": 9.606666666666667e-06, "loss": 4.9151, "step": 126 }, { "epoch": 1.27, "grad_norm": 101.6932601928711, "learning_rate": 9.603333333333335e-06, "loss": 5.3803, "step": 127 }, { "epoch": 1.28, "grad_norm": 56.3006591796875, "learning_rate": 9.600000000000001e-06, "loss": 6.235, "step": 128 }, { "epoch": 1.29, "grad_norm": 141.53514099121094, "learning_rate": 9.596666666666667e-06, "loss": 4.8844, "step": 129 }, { "epoch": 1.3, "grad_norm": 179.69839477539062, "learning_rate": 9.593333333333335e-06, "loss": 5.3569, "step": 130 }, { "epoch": 1.31, "grad_norm": 122.46894836425781, "learning_rate": 9.59e-06, "loss": 5.4676, "step": 131 }, { "epoch": 1.32, "grad_norm": 222.2217254638672, "learning_rate": 9.586666666666667e-06, "loss": 5.1951, "step": 132 }, { "epoch": 1.33, "grad_norm": 153.22354125976562, "learning_rate": 9.583333333333335e-06, "loss": 5.0124, "step": 133 }, { "epoch": 1.34, "grad_norm": 76.09632110595703, "learning_rate": 9.58e-06, "loss": 5.2748, "step": 134 }, { "epoch": 1.35, "grad_norm": 372.8356018066406, "learning_rate": 9.576666666666668e-06, "loss": 5.2964, "step": 135 }, { "epoch": 1.3599999999999999, "grad_norm": 89.58355712890625, "learning_rate": 9.573333333333334e-06, "loss": 4.3986, "step": 136 }, { "epoch": 1.37, "grad_norm": 
161.73101806640625, "learning_rate": 9.57e-06, "loss": 4.8878, "step": 137 }, { "epoch": 1.38, "grad_norm": 90.51305389404297, "learning_rate": 9.566666666666668e-06, "loss": 5.3303, "step": 138 }, { "epoch": 1.3900000000000001, "grad_norm": 38.26066207885742, "learning_rate": 9.563333333333334e-06, "loss": 5.5126, "step": 139 }, { "epoch": 1.4, "grad_norm": 129.59400939941406, "learning_rate": 9.56e-06, "loss": 5.4699, "step": 140 }, { "epoch": 1.41, "grad_norm": 257.953857421875, "learning_rate": 9.556666666666668e-06, "loss": 4.7545, "step": 141 }, { "epoch": 1.42, "grad_norm": 134.5067901611328, "learning_rate": 9.553333333333334e-06, "loss": 4.6847, "step": 142 }, { "epoch": 1.43, "grad_norm": 207.28208923339844, "learning_rate": 9.55e-06, "loss": 4.6076, "step": 143 }, { "epoch": 1.44, "grad_norm": 102.0439682006836, "learning_rate": 9.546666666666668e-06, "loss": 5.2316, "step": 144 }, { "epoch": 1.45, "grad_norm": 53.928001403808594, "learning_rate": 9.543333333333334e-06, "loss": 5.1261, "step": 145 }, { "epoch": 1.46, "grad_norm": 151.2694091796875, "learning_rate": 9.54e-06, "loss": 4.7683, "step": 146 }, { "epoch": 1.47, "grad_norm": 82.04322814941406, "learning_rate": 9.536666666666668e-06, "loss": 5.2192, "step": 147 }, { "epoch": 1.48, "grad_norm": 117.7119369506836, "learning_rate": 9.533333333333334e-06, "loss": 5.2774, "step": 148 }, { "epoch": 1.49, "grad_norm": 48.51362609863281, "learning_rate": 9.53e-06, "loss": 5.0791, "step": 149 }, { "epoch": 1.5, "grad_norm": 113.70185089111328, "learning_rate": 9.526666666666668e-06, "loss": 4.9315, "step": 150 }, { "epoch": 1.5, "eval_loss": 4.986169815063477, "eval_map": 0.0002, "eval_map_50": 0.0007, "eval_map_75": 0.0001, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_bead": -1.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 
0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0001, "eval_map_leg warmer": -1.0, "eval_map_medium": 0.0005, "eval_map_neckline": 0.0, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0004, "eval_map_shoe": 0.0078, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0, "eval_map_small": 0.0005, "eval_map_sock": 0.0, "eval_map_tassel": -1.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0014, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.001, "eval_mar_10": 0.007, "eval_mar_100": 0.0114, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0174, "eval_mar_100_bead": -1.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_leg warmer": -1.0, "eval_mar_100_neckline": 0.0, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.02, "eval_mar_100_shoe": 0.3052, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, 
"eval_mar_100_sleeve": 0.0096, "eval_mar_100_sock": 0.0, "eval_mar_100_tassel": -1.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.1167, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0064, "eval_mar_medium": 0.0204, "eval_mar_small": 0.0069, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.6939, "eval_samples_per_second": 5.349, "eval_steps_per_second": 1.337, "step": 150 }, { "epoch": 1.51, "grad_norm": 52.550865173339844, "learning_rate": 9.523333333333334e-06, "loss": 5.7605, "step": 151 }, { "epoch": 1.52, "grad_norm": 58.02830505371094, "learning_rate": 9.52e-06, "loss": 5.1517, "step": 152 }, { "epoch": 1.53, "grad_norm": 112.91033935546875, "learning_rate": 9.516666666666668e-06, "loss": 5.5353, "step": 153 }, { "epoch": 1.54, "grad_norm": 94.40341186523438, "learning_rate": 9.513333333333334e-06, "loss": 4.6382, "step": 154 }, { "epoch": 1.55, "grad_norm": 768.4209594726562, "learning_rate": 9.51e-06, "loss": 5.2702, "step": 155 }, { "epoch": 1.56, "grad_norm": 100.7395248413086, "learning_rate": 9.506666666666667e-06, "loss": 4.8767, "step": 156 }, { "epoch": 1.5699999999999998, "grad_norm": 154.4225311279297, "learning_rate": 9.503333333333334e-06, "loss": 4.7994, "step": 157 }, { "epoch": 1.58, "grad_norm": 52.13207244873047, "learning_rate": 9.5e-06, "loss": 4.7785, "step": 158 }, { "epoch": 1.5899999999999999, "grad_norm": 74.33529663085938, "learning_rate": 9.496666666666667e-06, "loss": 4.5953, "step": 159 }, { "epoch": 1.6, "grad_norm": 103.92268371582031, "learning_rate": 9.493333333333334e-06, "loss": 5.5508, "step": 160 }, { "epoch": 1.6099999999999999, "grad_norm": 195.43621826171875, "learning_rate": 9.49e-06, "loss": 4.552, "step": 161 }, { "epoch": 1.62, "grad_norm": 163.6009521484375, "learning_rate": 9.486666666666667e-06, "loss": 4.787, "step": 162 }, { "epoch": 1.63, "grad_norm": 
143.98809814453125, "learning_rate": 9.483333333333335e-06, "loss": 4.7234, "step": 163 }, { "epoch": 1.6400000000000001, "grad_norm": 131.05905151367188, "learning_rate": 9.48e-06, "loss": 4.7361, "step": 164 }, { "epoch": 1.65, "grad_norm": 254.59112548828125, "learning_rate": 9.476666666666667e-06, "loss": 4.3839, "step": 165 }, { "epoch": 1.6600000000000001, "grad_norm": 73.29564666748047, "learning_rate": 9.473333333333335e-06, "loss": 5.1592, "step": 166 }, { "epoch": 1.67, "grad_norm": 182.2209930419922, "learning_rate": 9.47e-06, "loss": 4.9556, "step": 167 }, { "epoch": 1.6800000000000002, "grad_norm": 113.6426010131836, "learning_rate": 9.466666666666667e-06, "loss": 4.7259, "step": 168 }, { "epoch": 1.69, "grad_norm": 61.910518646240234, "learning_rate": 9.463333333333335e-06, "loss": 4.4733, "step": 169 }, { "epoch": 1.7, "grad_norm": 146.50169372558594, "learning_rate": 9.460000000000001e-06, "loss": 4.4146, "step": 170 }, { "epoch": 1.71, "grad_norm": 44.86986541748047, "learning_rate": 9.456666666666667e-06, "loss": 4.8887, "step": 171 }, { "epoch": 1.72, "grad_norm": 223.54502868652344, "learning_rate": 9.453333333333335e-06, "loss": 5.2323, "step": 172 }, { "epoch": 1.73, "grad_norm": 170.9695281982422, "learning_rate": 9.450000000000001e-06, "loss": 4.9292, "step": 173 }, { "epoch": 1.74, "grad_norm": 78.88746643066406, "learning_rate": 9.446666666666667e-06, "loss": 4.637, "step": 174 }, { "epoch": 1.75, "grad_norm": 59.41911697387695, "learning_rate": 9.443333333333335e-06, "loss": 4.8008, "step": 175 }, { "epoch": 1.76, "grad_norm": 879.5148315429688, "learning_rate": 9.440000000000001e-06, "loss": 4.0667, "step": 176 }, { "epoch": 1.77, "grad_norm": 61.50287628173828, "learning_rate": 9.436666666666667e-06, "loss": 4.5335, "step": 177 }, { "epoch": 1.78, "grad_norm": 211.1352996826172, "learning_rate": 9.433333333333335e-06, "loss": 4.6751, "step": 178 }, { "epoch": 1.79, "grad_norm": 215.73207092285156, "learning_rate": 9.43e-06, "loss": 
4.4507, "step": 179 }, { "epoch": 1.8, "grad_norm": 137.934326171875, "learning_rate": 9.426666666666667e-06, "loss": 4.2733, "step": 180 }, { "epoch": 1.81, "grad_norm": 175.71815490722656, "learning_rate": 9.423333333333335e-06, "loss": 4.0853, "step": 181 }, { "epoch": 1.8199999999999998, "grad_norm": 394.8920593261719, "learning_rate": 9.42e-06, "loss": 4.839, "step": 182 }, { "epoch": 1.83, "grad_norm": 44.15299606323242, "learning_rate": 9.416666666666667e-06, "loss": 6.794, "step": 183 }, { "epoch": 1.8399999999999999, "grad_norm": 101.74703216552734, "learning_rate": 9.413333333333334e-06, "loss": 4.4615, "step": 184 }, { "epoch": 1.85, "grad_norm": 370.3851623535156, "learning_rate": 9.41e-06, "loss": 5.0431, "step": 185 }, { "epoch": 1.8599999999999999, "grad_norm": 76.9601821899414, "learning_rate": 9.406666666666668e-06, "loss": 4.5517, "step": 186 }, { "epoch": 1.87, "grad_norm": 113.7017593383789, "learning_rate": 9.403333333333334e-06, "loss": 4.3416, "step": 187 }, { "epoch": 1.88, "grad_norm": 137.3843231201172, "learning_rate": 9.4e-06, "loss": 5.3743, "step": 188 }, { "epoch": 1.8900000000000001, "grad_norm": 91.61438751220703, "learning_rate": 9.396666666666668e-06, "loss": 4.3817, "step": 189 }, { "epoch": 1.9, "grad_norm": 384.3169250488281, "learning_rate": 9.393333333333334e-06, "loss": 5.057, "step": 190 }, { "epoch": 1.9100000000000001, "grad_norm": 55.682926177978516, "learning_rate": 9.39e-06, "loss": 4.0885, "step": 191 }, { "epoch": 1.92, "grad_norm": 227.63699340820312, "learning_rate": 9.386666666666668e-06, "loss": 4.6449, "step": 192 }, { "epoch": 1.9300000000000002, "grad_norm": 95.81497955322266, "learning_rate": 9.383333333333334e-06, "loss": 4.5374, "step": 193 }, { "epoch": 1.94, "grad_norm": 46.565860748291016, "learning_rate": 9.38e-06, "loss": 5.1939, "step": 194 }, { "epoch": 1.95, "grad_norm": 32.05930709838867, "learning_rate": 9.376666666666668e-06, "loss": 7.3044, "step": 195 }, { "epoch": 1.96, "grad_norm": 
218.3511505126953, "learning_rate": 9.373333333333334e-06, "loss": 4.0487, "step": 196 }, { "epoch": 1.97, "grad_norm": 169.2926788330078, "learning_rate": 9.370000000000002e-06, "loss": 4.3815, "step": 197 }, { "epoch": 1.98, "grad_norm": 44.079795837402344, "learning_rate": 9.366666666666668e-06, "loss": 4.4326, "step": 198 }, { "epoch": 1.99, "grad_norm": 72.93194580078125, "learning_rate": 9.363333333333334e-06, "loss": 4.4964, "step": 199 }, { "epoch": 2.0, "grad_norm": 76.61376190185547, "learning_rate": 9.360000000000002e-06, "loss": 4.4734, "step": 200 }, { "epoch": 2.0, "eval_loss": 4.390911102294922, "eval_map": 0.0005, "eval_map_50": 0.0013, "eval_map_75": 0.0003, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_bead": -1.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0001, "eval_map_leg warmer": -1.0, "eval_map_medium": 0.0009, "eval_map_neckline": 0.0, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.0196, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0003, "eval_map_small": 0.0013, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0014, "eval_mar_10": 0.0069, "eval_mar_100": 0.0102, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, 
"eval_mar_100_bead": -1.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_leg warmer": -1.0, "eval_mar_100_neckline": 0.0, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.3746, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.0443, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0017, "eval_mar_medium": 0.0182, "eval_mar_small": 0.0101, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.1866, "eval_samples_per_second": 5.499, "eval_steps_per_second": 1.375, "step": 200 }, { "epoch": 2.01, "grad_norm": 78.486083984375, "learning_rate": 9.356666666666668e-06, "loss": 4.709, "step": 201 }, { "epoch": 2.02, "grad_norm": 66.67379760742188, "learning_rate": 9.353333333333334e-06, "loss": 4.5429, "step": 202 }, { "epoch": 2.03, "grad_norm": 174.03518676757812, "learning_rate": 9.350000000000002e-06, "loss": 4.0364, "step": 203 }, { "epoch": 2.04, "grad_norm": 109.9896469116211, "learning_rate": 9.346666666666666e-06, "loss": 4.6237, "step": 204 }, { "epoch": 2.05, "grad_norm": 53.46676254272461, "learning_rate": 9.343333333333334e-06, 
"loss": 4.7986, "step": 205 }, { "epoch": 2.06, "grad_norm": 429.9852600097656, "learning_rate": 9.340000000000002e-06, "loss": 4.2703, "step": 206 }, { "epoch": 2.07, "grad_norm": 448.01007080078125, "learning_rate": 9.336666666666666e-06, "loss": 4.3375, "step": 207 }, { "epoch": 2.08, "grad_norm": 198.3071746826172, "learning_rate": 9.333333333333334e-06, "loss": 3.8154, "step": 208 }, { "epoch": 2.09, "grad_norm": 36.5258674621582, "learning_rate": 9.33e-06, "loss": 4.4657, "step": 209 }, { "epoch": 2.1, "grad_norm": 1557.2879638671875, "learning_rate": 9.326666666666667e-06, "loss": 4.275, "step": 210 }, { "epoch": 2.11, "grad_norm": 67.41043853759766, "learning_rate": 9.323333333333334e-06, "loss": 4.4906, "step": 211 }, { "epoch": 2.12, "grad_norm": 98.17545318603516, "learning_rate": 9.32e-06, "loss": 4.6311, "step": 212 }, { "epoch": 2.13, "grad_norm": 66.64163970947266, "learning_rate": 9.316666666666667e-06, "loss": 3.9437, "step": 213 }, { "epoch": 2.14, "grad_norm": 46.38419723510742, "learning_rate": 9.313333333333335e-06, "loss": 3.8672, "step": 214 }, { "epoch": 2.15, "grad_norm": 52.14957046508789, "learning_rate": 9.31e-06, "loss": 4.2873, "step": 215 }, { "epoch": 2.16, "grad_norm": 99.98846435546875, "learning_rate": 9.306666666666667e-06, "loss": 4.3281, "step": 216 }, { "epoch": 2.17, "grad_norm": 62.75191879272461, "learning_rate": 9.303333333333335e-06, "loss": 4.1664, "step": 217 }, { "epoch": 2.18, "grad_norm": 126.70586395263672, "learning_rate": 9.3e-06, "loss": 4.0687, "step": 218 }, { "epoch": 2.19, "grad_norm": 90.38249969482422, "learning_rate": 9.296666666666667e-06, "loss": 4.996, "step": 219 }, { "epoch": 2.2, "grad_norm": 71.7837142944336, "learning_rate": 9.293333333333335e-06, "loss": 4.7217, "step": 220 }, { "epoch": 2.21, "grad_norm": 61.828067779541016, "learning_rate": 9.29e-06, "loss": 6.3556, "step": 221 }, { "epoch": 2.22, "grad_norm": 122.2561264038086, "learning_rate": 9.286666666666667e-06, "loss": 4.3856, "step": 222 
}, { "epoch": 2.23, "grad_norm": 62.386749267578125, "learning_rate": 9.283333333333335e-06, "loss": 4.2684, "step": 223 }, { "epoch": 2.24, "grad_norm": 93.0372314453125, "learning_rate": 9.280000000000001e-06, "loss": 4.7015, "step": 224 }, { "epoch": 2.25, "grad_norm": 120.41262817382812, "learning_rate": 9.276666666666667e-06, "loss": 4.5245, "step": 225 }, { "epoch": 2.26, "grad_norm": 160.79310607910156, "learning_rate": 9.273333333333335e-06, "loss": 4.0581, "step": 226 }, { "epoch": 2.27, "grad_norm": 267.5970764160156, "learning_rate": 9.270000000000001e-06, "loss": 4.4839, "step": 227 }, { "epoch": 2.2800000000000002, "grad_norm": 100.80946350097656, "learning_rate": 9.266666666666667e-06, "loss": 3.8115, "step": 228 }, { "epoch": 2.29, "grad_norm": 90.26026153564453, "learning_rate": 9.263333333333335e-06, "loss": 4.6671, "step": 229 }, { "epoch": 2.3, "grad_norm": 105.542724609375, "learning_rate": 9.260000000000001e-06, "loss": 4.1153, "step": 230 }, { "epoch": 2.31, "grad_norm": 53.55142593383789, "learning_rate": 9.256666666666667e-06, "loss": 3.8209, "step": 231 }, { "epoch": 2.32, "grad_norm": 48.92270278930664, "learning_rate": 9.253333333333333e-06, "loss": 4.8045, "step": 232 }, { "epoch": 2.33, "grad_norm": 132.21261596679688, "learning_rate": 9.250000000000001e-06, "loss": 4.3993, "step": 233 }, { "epoch": 2.34, "grad_norm": 163.0277862548828, "learning_rate": 9.246666666666667e-06, "loss": 3.9196, "step": 234 }, { "epoch": 2.35, "grad_norm": 43.284698486328125, "learning_rate": 9.243333333333333e-06, "loss": 4.1371, "step": 235 }, { "epoch": 2.36, "grad_norm": 212.76791381835938, "learning_rate": 9.240000000000001e-06, "loss": 4.1497, "step": 236 }, { "epoch": 2.37, "grad_norm": 103.4527359008789, "learning_rate": 9.236666666666667e-06, "loss": 4.2641, "step": 237 }, { "epoch": 2.38, "grad_norm": 72.77599334716797, "learning_rate": 9.233333333333334e-06, "loss": 4.361, "step": 238 }, { "epoch": 2.39, "grad_norm": 242.95579528808594, 
"learning_rate": 9.230000000000001e-06, "loss": 4.2819, "step": 239 }, { "epoch": 2.4, "grad_norm": 1006.5597534179688, "learning_rate": 9.226666666666668e-06, "loss": 4.3534, "step": 240 }, { "epoch": 2.41, "grad_norm": 120.3678207397461, "learning_rate": 9.223333333333334e-06, "loss": 3.8686, "step": 241 }, { "epoch": 2.42, "grad_norm": 55.07821273803711, "learning_rate": 9.220000000000002e-06, "loss": 4.3168, "step": 242 }, { "epoch": 2.43, "grad_norm": 76.17962646484375, "learning_rate": 9.216666666666668e-06, "loss": 3.9099, "step": 243 }, { "epoch": 2.44, "grad_norm": 1369.7435302734375, "learning_rate": 9.213333333333334e-06, "loss": 4.234, "step": 244 }, { "epoch": 2.45, "grad_norm": 711.4883422851562, "learning_rate": 9.210000000000002e-06, "loss": 4.5436, "step": 245 }, { "epoch": 2.46, "grad_norm": 72.07254791259766, "learning_rate": 9.206666666666668e-06, "loss": 3.8754, "step": 246 }, { "epoch": 2.4699999999999998, "grad_norm": 108.594970703125, "learning_rate": 9.203333333333334e-06, "loss": 4.3161, "step": 247 }, { "epoch": 2.48, "grad_norm": 89.5588607788086, "learning_rate": 9.200000000000002e-06, "loss": 4.2675, "step": 248 }, { "epoch": 2.49, "grad_norm": 49.34614944458008, "learning_rate": 9.196666666666668e-06, "loss": 4.5248, "step": 249 }, { "epoch": 2.5, "grad_norm": 198.24911499023438, "learning_rate": 9.193333333333334e-06, "loss": 3.8903, "step": 250 }, { "epoch": 2.5, "eval_loss": 4.025615692138672, "eval_map": 0.0006, "eval_map_50": 0.0017, "eval_map_75": 0.0003, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_bead": -1.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 
0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0001, "eval_map_medium": 0.0011, "eval_map_neckline": 0.0, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.0239, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0007, "eval_map_small": 0.0011, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0018, "eval_mar_10": 0.0073, "eval_mar_100": 0.0115, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_bead": -1.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.394, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.0765, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 
0.0037, "eval_mar_medium": 0.0209, "eval_mar_small": 0.0095, "eval_model_preparation_time": 0.0124, "eval_runtime": 19.3123, "eval_samples_per_second": 5.178, "eval_steps_per_second": 1.295, "step": 250 }, { "epoch": 2.51, "grad_norm": 283.2328186035156, "learning_rate": 9.190000000000002e-06, "loss": 3.7867, "step": 251 }, { "epoch": 2.52, "grad_norm": 165.20071411132812, "learning_rate": 9.186666666666666e-06, "loss": 4.6823, "step": 252 }, { "epoch": 2.5300000000000002, "grad_norm": 1225.5880126953125, "learning_rate": 9.183333333333334e-06, "loss": 4.1469, "step": 253 }, { "epoch": 2.54, "grad_norm": 57.9954948425293, "learning_rate": 9.180000000000002e-06, "loss": 4.3413, "step": 254 }, { "epoch": 2.55, "grad_norm": 79.46754455566406, "learning_rate": 9.176666666666666e-06, "loss": 4.2469, "step": 255 }, { "epoch": 2.56, "grad_norm": 48.03692626953125, "learning_rate": 9.173333333333334e-06, "loss": 6.8534, "step": 256 }, { "epoch": 2.57, "grad_norm": 236.1758270263672, "learning_rate": 9.17e-06, "loss": 3.6108, "step": 257 }, { "epoch": 2.58, "grad_norm": 55.93033218383789, "learning_rate": 9.166666666666666e-06, "loss": 4.0669, "step": 258 }, { "epoch": 2.59, "grad_norm": 83.15673065185547, "learning_rate": 9.163333333333334e-06, "loss": 4.2809, "step": 259 }, { "epoch": 2.6, "grad_norm": 162.99400329589844, "learning_rate": 9.16e-06, "loss": 4.0081, "step": 260 }, { "epoch": 2.61, "grad_norm": 62.763763427734375, "learning_rate": 9.156666666666667e-06, "loss": 4.2227, "step": 261 }, { "epoch": 2.62, "grad_norm": 86.95005798339844, "learning_rate": 9.153333333333334e-06, "loss": 3.6262, "step": 262 }, { "epoch": 2.63, "grad_norm": 125.47496795654297, "learning_rate": 9.15e-06, "loss": 4.3134, "step": 263 }, { "epoch": 2.64, "grad_norm": 66.62216186523438, "learning_rate": 9.146666666666667e-06, "loss": 3.749, "step": 264 }, { "epoch": 2.65, "grad_norm": 177.82305908203125, "learning_rate": 9.143333333333334e-06, "loss": 3.4782, "step": 265 }, { "epoch": 
2.66, "grad_norm": 87.8270263671875, "learning_rate": 9.14e-06, "loss": 4.3984, "step": 266 }, { "epoch": 2.67, "grad_norm": 118.46565246582031, "learning_rate": 9.136666666666667e-06, "loss": 2.9796, "step": 267 }, { "epoch": 2.68, "grad_norm": 50.01642990112305, "learning_rate": 9.133333333333335e-06, "loss": 3.8751, "step": 268 }, { "epoch": 2.69, "grad_norm": 67.69536590576172, "learning_rate": 9.13e-06, "loss": 3.5295, "step": 269 }, { "epoch": 2.7, "grad_norm": 593.4517822265625, "learning_rate": 9.126666666666667e-06, "loss": 3.9105, "step": 270 }, { "epoch": 2.71, "grad_norm": 110.75060272216797, "learning_rate": 9.123333333333335e-06, "loss": 3.873, "step": 271 }, { "epoch": 2.7199999999999998, "grad_norm": 620.6453247070312, "learning_rate": 9.12e-06, "loss": 4.492, "step": 272 }, { "epoch": 2.73, "grad_norm": 63.369937896728516, "learning_rate": 9.116666666666667e-06, "loss": 4.0267, "step": 273 }, { "epoch": 2.74, "grad_norm": 92.73516845703125, "learning_rate": 9.113333333333335e-06, "loss": 3.5851, "step": 274 }, { "epoch": 2.75, "grad_norm": 253.90353393554688, "learning_rate": 9.110000000000001e-06, "loss": 3.7349, "step": 275 }, { "epoch": 2.76, "grad_norm": 53.060089111328125, "learning_rate": 9.106666666666667e-06, "loss": 3.6334, "step": 276 }, { "epoch": 2.77, "grad_norm": 91.34034729003906, "learning_rate": 9.103333333333335e-06, "loss": 4.2239, "step": 277 }, { "epoch": 2.7800000000000002, "grad_norm": 50.888458251953125, "learning_rate": 9.100000000000001e-06, "loss": 3.9752, "step": 278 }, { "epoch": 2.79, "grad_norm": 113.77420806884766, "learning_rate": 9.096666666666667e-06, "loss": 3.6606, "step": 279 }, { "epoch": 2.8, "grad_norm": 231.2066192626953, "learning_rate": 9.093333333333333e-06, "loss": 4.058, "step": 280 }, { "epoch": 2.81, "grad_norm": 177.0528106689453, "learning_rate": 9.090000000000001e-06, "loss": 3.8415, "step": 281 }, { "epoch": 2.82, "grad_norm": 94.72674560546875, "learning_rate": 9.086666666666667e-06, "loss": 
3.8255, "step": 282 }, { "epoch": 2.83, "grad_norm": 46.68074417114258, "learning_rate": 9.083333333333333e-06, "loss": 4.3094, "step": 283 }, { "epoch": 2.84, "grad_norm": 48.644344329833984, "learning_rate": 9.080000000000001e-06, "loss": 3.7807, "step": 284 }, { "epoch": 2.85, "grad_norm": 157.5529022216797, "learning_rate": 9.076666666666667e-06, "loss": 5.0154, "step": 285 }, { "epoch": 2.86, "grad_norm": 57.19660949707031, "learning_rate": 9.073333333333333e-06, "loss": 4.0554, "step": 286 }, { "epoch": 2.87, "grad_norm": 865.3327026367188, "learning_rate": 9.070000000000001e-06, "loss": 4.0107, "step": 287 }, { "epoch": 2.88, "grad_norm": 53.16604995727539, "learning_rate": 9.066666666666667e-06, "loss": 4.2932, "step": 288 }, { "epoch": 2.89, "grad_norm": 712.4611206054688, "learning_rate": 9.063333333333334e-06, "loss": 3.638, "step": 289 }, { "epoch": 2.9, "grad_norm": 421.8879699707031, "learning_rate": 9.060000000000001e-06, "loss": 4.4908, "step": 290 }, { "epoch": 2.91, "grad_norm": 45.219173431396484, "learning_rate": 9.056666666666667e-06, "loss": 3.986, "step": 291 }, { "epoch": 2.92, "grad_norm": 258.6515808105469, "learning_rate": 9.053333333333334e-06, "loss": 3.9271, "step": 292 }, { "epoch": 2.93, "grad_norm": 86.18993377685547, "learning_rate": 9.050000000000001e-06, "loss": 3.2085, "step": 293 }, { "epoch": 2.94, "grad_norm": 39.99657440185547, "learning_rate": 9.046666666666668e-06, "loss": 3.6238, "step": 294 }, { "epoch": 2.95, "grad_norm": 105.16471862792969, "learning_rate": 9.043333333333334e-06, "loss": 3.7404, "step": 295 }, { "epoch": 2.96, "grad_norm": 46.48087692260742, "learning_rate": 9.040000000000002e-06, "loss": 3.4064, "step": 296 }, { "epoch": 2.9699999999999998, "grad_norm": 95.09687042236328, "learning_rate": 9.036666666666668e-06, "loss": 3.9838, "step": 297 }, { "epoch": 2.98, "grad_norm": 140.98507690429688, "learning_rate": 9.033333333333334e-06, "loss": 4.3444, "step": 298 }, { "epoch": 2.99, "grad_norm": 
64.29010009765625, "learning_rate": 9.030000000000002e-06, "loss": 3.928, "step": 299 }, { "epoch": 3.0, "grad_norm": 68.14689636230469, "learning_rate": 9.026666666666666e-06, "loss": 4.134, "step": 300 }, { "epoch": 3.0, "eval_loss": 3.7972939014434814, "eval_map": 0.0009, "eval_map_50": 0.0026, "eval_map_75": 0.0004, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_bead": -1.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0003, "eval_map_medium": 0.0015, "eval_map_neckline": 0.0, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.0356, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0011, "eval_map_small": 0.0016, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0025, "eval_mar_10": 0.008, "eval_mar_100": 0.0107, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_bead": -1.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, 
"eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.3657, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.0748, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0025, "eval_mar_medium": 0.0191, "eval_mar_small": 0.0094, "eval_model_preparation_time": 0.0124, "eval_runtime": 19.1734, "eval_samples_per_second": 5.216, "eval_steps_per_second": 1.304, "step": 300 }, { "epoch": 3.01, "grad_norm": 277.7216491699219, "learning_rate": 9.023333333333334e-06, "loss": 3.8971, "step": 301 }, { "epoch": 3.02, "grad_norm": 94.00948333740234, "learning_rate": 9.020000000000002e-06, "loss": 3.7298, "step": 302 }, { "epoch": 3.03, "grad_norm": 394.5757141113281, "learning_rate": 9.016666666666666e-06, "loss": 3.8927, "step": 303 }, { "epoch": 3.04, "grad_norm": 81.53153991699219, "learning_rate": 9.013333333333334e-06, "loss": 3.7959, "step": 304 }, { "epoch": 3.05, "grad_norm": 65.19332885742188, "learning_rate": 9.01e-06, "loss": 3.8399, "step": 305 }, { "epoch": 3.06, "grad_norm": 145.1247100830078, "learning_rate": 9.006666666666666e-06, "loss": 4.4267, "step": 306 }, { "epoch": 3.07, "grad_norm": 61.648216247558594, "learning_rate": 9.003333333333334e-06, "loss": 3.637, "step": 307 }, { "epoch": 3.08, "grad_norm": 135.11834716796875, "learning_rate": 9e-06, "loss": 3.711, "step": 308 }, { "epoch": 3.09, "grad_norm": 64.72627258300781, 
"learning_rate": 8.996666666666666e-06, "loss": 3.5967, "step": 309 }, { "epoch": 3.1, "grad_norm": 76.9308853149414, "learning_rate": 8.993333333333334e-06, "loss": 3.5184, "step": 310 }, { "epoch": 3.11, "grad_norm": 55.549747467041016, "learning_rate": 8.99e-06, "loss": 4.568, "step": 311 }, { "epoch": 3.12, "grad_norm": 72.6838150024414, "learning_rate": 8.986666666666666e-06, "loss": 3.6806, "step": 312 }, { "epoch": 3.13, "grad_norm": 60.78421401977539, "learning_rate": 8.983333333333334e-06, "loss": 4.0023, "step": 313 }, { "epoch": 3.14, "grad_norm": 52.75045394897461, "learning_rate": 8.98e-06, "loss": 3.9854, "step": 314 }, { "epoch": 3.15, "grad_norm": 77.13304901123047, "learning_rate": 8.976666666666667e-06, "loss": 4.4722, "step": 315 }, { "epoch": 3.16, "grad_norm": 95.91543579101562, "learning_rate": 8.973333333333334e-06, "loss": 3.8806, "step": 316 }, { "epoch": 3.17, "grad_norm": 51.36568832397461, "learning_rate": 8.97e-06, "loss": 3.2948, "step": 317 }, { "epoch": 3.18, "grad_norm": 69.78471374511719, "learning_rate": 8.966666666666667e-06, "loss": 3.52, "step": 318 }, { "epoch": 3.19, "grad_norm": 109.23023986816406, "learning_rate": 8.963333333333334e-06, "loss": 3.9635, "step": 319 }, { "epoch": 3.2, "grad_norm": 105.02713775634766, "learning_rate": 8.96e-06, "loss": 4.0262, "step": 320 }, { "epoch": 3.21, "grad_norm": 83.81452941894531, "learning_rate": 8.956666666666668e-06, "loss": 3.5629, "step": 321 }, { "epoch": 3.22, "grad_norm": 65.57941436767578, "learning_rate": 8.953333333333335e-06, "loss": 3.6986, "step": 322 }, { "epoch": 3.23, "grad_norm": 33.60200119018555, "learning_rate": 8.95e-06, "loss": 3.6248, "step": 323 }, { "epoch": 3.24, "grad_norm": 90.24606323242188, "learning_rate": 8.946666666666669e-06, "loss": 3.2063, "step": 324 }, { "epoch": 3.25, "grad_norm": 75.65552520751953, "learning_rate": 8.943333333333335e-06, "loss": 3.2945, "step": 325 }, { "epoch": 3.26, "grad_norm": 41.21976089477539, "learning_rate": 8.94e-06, 
"loss": 3.9385, "step": 326 }, { "epoch": 3.27, "grad_norm": 71.94499206542969, "learning_rate": 8.936666666666669e-06, "loss": 4.0783, "step": 327 }, { "epoch": 3.2800000000000002, "grad_norm": 85.01509094238281, "learning_rate": 8.933333333333333e-06, "loss": 3.6585, "step": 328 }, { "epoch": 3.29, "grad_norm": 294.2102966308594, "learning_rate": 8.930000000000001e-06, "loss": 3.5558, "step": 329 }, { "epoch": 3.3, "grad_norm": 48.56850814819336, "learning_rate": 8.926666666666669e-06, "loss": 3.0833, "step": 330 }, { "epoch": 3.31, "grad_norm": 59.283016204833984, "learning_rate": 8.923333333333333e-06, "loss": 4.1257, "step": 331 }, { "epoch": 3.32, "grad_norm": 83.40105438232422, "learning_rate": 8.920000000000001e-06, "loss": 4.0491, "step": 332 }, { "epoch": 3.33, "grad_norm": 76.4682388305664, "learning_rate": 8.916666666666667e-06, "loss": 4.2704, "step": 333 }, { "epoch": 3.34, "grad_norm": 51.526065826416016, "learning_rate": 8.913333333333333e-06, "loss": 3.6495, "step": 334 }, { "epoch": 3.35, "grad_norm": 75.96365356445312, "learning_rate": 8.910000000000001e-06, "loss": 3.6848, "step": 335 }, { "epoch": 3.36, "grad_norm": 115.2063217163086, "learning_rate": 8.906666666666667e-06, "loss": 4.2718, "step": 336 }, { "epoch": 3.37, "grad_norm": 55.06415557861328, "learning_rate": 8.903333333333333e-06, "loss": 2.7969, "step": 337 }, { "epoch": 3.38, "grad_norm": 142.06687927246094, "learning_rate": 8.900000000000001e-06, "loss": 3.4543, "step": 338 }, { "epoch": 3.39, "grad_norm": 55.60197067260742, "learning_rate": 8.896666666666667e-06, "loss": 3.9229, "step": 339 }, { "epoch": 3.4, "grad_norm": 190.800048828125, "learning_rate": 8.893333333333333e-06, "loss": 3.7419, "step": 340 }, { "epoch": 3.41, "grad_norm": 58.63310623168945, "learning_rate": 8.890000000000001e-06, "loss": 6.8334, "step": 341 }, { "epoch": 3.42, "grad_norm": 44.4820556640625, "learning_rate": 8.886666666666667e-06, "loss": 3.8688, "step": 342 }, { "epoch": 3.43, "grad_norm": 
107.87239837646484, "learning_rate": 8.883333333333334e-06, "loss": 3.1291, "step": 343 }, { "epoch": 3.44, "grad_norm": 134.27194213867188, "learning_rate": 8.880000000000001e-06, "loss": 3.938, "step": 344 }, { "epoch": 3.45, "grad_norm": 42.519126892089844, "learning_rate": 8.876666666666668e-06, "loss": 3.7767, "step": 345 }, { "epoch": 3.46, "grad_norm": 100.48092651367188, "learning_rate": 8.873333333333334e-06, "loss": 3.2984, "step": 346 }, { "epoch": 3.4699999999999998, "grad_norm": 37.67927551269531, "learning_rate": 8.870000000000001e-06, "loss": 4.3225, "step": 347 }, { "epoch": 3.48, "grad_norm": 164.73155212402344, "learning_rate": 8.866666666666668e-06, "loss": 3.2003, "step": 348 }, { "epoch": 3.49, "grad_norm": 40.24205017089844, "learning_rate": 8.863333333333334e-06, "loss": 3.6904, "step": 349 }, { "epoch": 3.5, "grad_norm": 111.23942565917969, "learning_rate": 8.860000000000002e-06, "loss": 3.3223, "step": 350 }, { "epoch": 3.5, "eval_loss": 3.6229894161224365, "eval_map": 0.0011, "eval_map_50": 0.0025, "eval_map_75": 0.0009, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_bead": -1.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0002, "eval_map_medium": 0.0017, "eval_map_neckline": 0.0, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.0428, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0022, "eval_map_small": 0.0024, 
"eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0024, "eval_mar_10": 0.0084, "eval_mar_100": 0.0115, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_bead": -1.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.3276, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.1452, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0087, "eval_mar_medium": 0.0186, "eval_mar_small": 0.0099, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.0295, "eval_samples_per_second": 5.546, "eval_steps_per_second": 1.387, "step": 350 }, { "epoch": 3.51, "grad_norm": 93.4109878540039, "learning_rate": 8.856666666666668e-06, "loss": 3.9991, "step": 351 }, { "epoch": 3.52, "grad_norm": 89.45787811279297, "learning_rate": 8.853333333333334e-06, "loss": 6.458, "step": 352 }, { "epoch": 
3.5300000000000002, "grad_norm": 110.30984497070312, "learning_rate": 8.85e-06, "loss": 3.9037, "step": 353 }, { "epoch": 3.54, "grad_norm": 74.02133178710938, "learning_rate": 8.846666666666668e-06, "loss": 4.1682, "step": 354 }, { "epoch": 3.55, "grad_norm": 196.9016571044922, "learning_rate": 8.843333333333334e-06, "loss": 4.1498, "step": 355 }, { "epoch": 3.56, "grad_norm": 58.94280242919922, "learning_rate": 8.84e-06, "loss": 4.2098, "step": 356 }, { "epoch": 3.57, "grad_norm": 195.16183471679688, "learning_rate": 8.836666666666668e-06, "loss": 3.2324, "step": 357 }, { "epoch": 3.58, "grad_norm": 71.33100128173828, "learning_rate": 8.833333333333334e-06, "loss": 3.7512, "step": 358 }, { "epoch": 3.59, "grad_norm": 233.12167358398438, "learning_rate": 8.83e-06, "loss": 3.5236, "step": 359 }, { "epoch": 3.6, "grad_norm": 119.70247650146484, "learning_rate": 8.826666666666668e-06, "loss": 3.808, "step": 360 }, { "epoch": 3.61, "grad_norm": 59.80833435058594, "learning_rate": 8.823333333333334e-06, "loss": 4.167, "step": 361 }, { "epoch": 3.62, "grad_norm": 107.9444580078125, "learning_rate": 8.82e-06, "loss": 4.319, "step": 362 }, { "epoch": 3.63, "grad_norm": 63.68690872192383, "learning_rate": 8.816666666666668e-06, "loss": 3.3516, "step": 363 }, { "epoch": 3.64, "grad_norm": 54.64612579345703, "learning_rate": 8.813333333333334e-06, "loss": 3.1097, "step": 364 }, { "epoch": 3.65, "grad_norm": 460.31280517578125, "learning_rate": 8.81e-06, "loss": 3.8116, "step": 365 }, { "epoch": 3.66, "grad_norm": 40.527061462402344, "learning_rate": 8.806666666666668e-06, "loss": 4.1411, "step": 366 }, { "epoch": 3.67, "grad_norm": 80.95730590820312, "learning_rate": 8.803333333333334e-06, "loss": 3.8437, "step": 367 }, { "epoch": 3.68, "grad_norm": 49.37534713745117, "learning_rate": 8.8e-06, "loss": 3.634, "step": 368 }, { "epoch": 3.69, "grad_norm": 85.55028533935547, "learning_rate": 8.796666666666668e-06, "loss": 3.112, "step": 369 }, { "epoch": 3.7, "grad_norm": 
46.32170486450195, "learning_rate": 8.793333333333334e-06, "loss": 3.6806, "step": 370 }, { "epoch": 3.71, "grad_norm": 66.6568832397461, "learning_rate": 8.79e-06, "loss": 3.8341, "step": 371 }, { "epoch": 3.7199999999999998, "grad_norm": 101.20774841308594, "learning_rate": 8.786666666666668e-06, "loss": 3.8179, "step": 372 }, { "epoch": 3.73, "grad_norm": 91.48712921142578, "learning_rate": 8.783333333333335e-06, "loss": 4.2512, "step": 373 }, { "epoch": 3.74, "grad_norm": 385.3520202636719, "learning_rate": 8.78e-06, "loss": 3.7428, "step": 374 }, { "epoch": 3.75, "grad_norm": 151.9366455078125, "learning_rate": 8.776666666666668e-06, "loss": 3.5225, "step": 375 }, { "epoch": 3.76, "grad_norm": 99.2162094116211, "learning_rate": 8.773333333333333e-06, "loss": 3.7764, "step": 376 }, { "epoch": 3.77, "grad_norm": 193.45018005371094, "learning_rate": 8.77e-06, "loss": 4.0399, "step": 377 }, { "epoch": 3.7800000000000002, "grad_norm": 67.36671447753906, "learning_rate": 8.766666666666669e-06, "loss": 3.3075, "step": 378 }, { "epoch": 3.79, "grad_norm": 85.83871459960938, "learning_rate": 8.763333333333333e-06, "loss": 3.6914, "step": 379 }, { "epoch": 3.8, "grad_norm": 243.83494567871094, "learning_rate": 8.76e-06, "loss": 3.7816, "step": 380 }, { "epoch": 3.81, "grad_norm": 62.64218521118164, "learning_rate": 8.756666666666667e-06, "loss": 3.4022, "step": 381 }, { "epoch": 3.82, "grad_norm": 114.74481964111328, "learning_rate": 8.753333333333333e-06, "loss": 4.019, "step": 382 }, { "epoch": 3.83, "grad_norm": 422.81011962890625, "learning_rate": 8.750000000000001e-06, "loss": 3.7885, "step": 383 }, { "epoch": 3.84, "grad_norm": 78.24569702148438, "learning_rate": 8.746666666666667e-06, "loss": 4.8121, "step": 384 }, { "epoch": 3.85, "grad_norm": 67.1266860961914, "learning_rate": 8.743333333333333e-06, "loss": 3.4697, "step": 385 }, { "epoch": 3.86, "grad_norm": 125.8707504272461, "learning_rate": 8.740000000000001e-06, "loss": 3.4086, "step": 386 }, { "epoch": 
3.87, "grad_norm": 99.7244873046875, "learning_rate": 8.736666666666667e-06, "loss": 3.659, "step": 387 }, { "epoch": 3.88, "grad_norm": 150.54489135742188, "learning_rate": 8.733333333333333e-06, "loss": 3.7281, "step": 388 }, { "epoch": 3.89, "grad_norm": 86.53459930419922, "learning_rate": 8.730000000000001e-06, "loss": 3.8689, "step": 389 }, { "epoch": 3.9, "grad_norm": 110.05010986328125, "learning_rate": 8.726666666666667e-06, "loss": 4.9193, "step": 390 }, { "epoch": 3.91, "grad_norm": 130.7677001953125, "learning_rate": 8.723333333333333e-06, "loss": 3.3931, "step": 391 }, { "epoch": 3.92, "grad_norm": 61.4644660949707, "learning_rate": 8.720000000000001e-06, "loss": 3.6258, "step": 392 }, { "epoch": 3.93, "grad_norm": 87.5998306274414, "learning_rate": 8.716666666666667e-06, "loss": 3.3441, "step": 393 }, { "epoch": 3.94, "grad_norm": 88.80728149414062, "learning_rate": 8.713333333333333e-06, "loss": 2.6346, "step": 394 }, { "epoch": 3.95, "grad_norm": 97.29376220703125, "learning_rate": 8.710000000000001e-06, "loss": 3.1763, "step": 395 }, { "epoch": 3.96, "grad_norm": 88.2977294921875, "learning_rate": 8.706666666666667e-06, "loss": 3.3282, "step": 396 }, { "epoch": 3.9699999999999998, "grad_norm": 43.426109313964844, "learning_rate": 8.703333333333334e-06, "loss": 3.0668, "step": 397 }, { "epoch": 3.98, "grad_norm": 81.98042297363281, "learning_rate": 8.700000000000001e-06, "loss": 4.097, "step": 398 }, { "epoch": 3.99, "grad_norm": 71.05018615722656, "learning_rate": 8.696666666666668e-06, "loss": 2.964, "step": 399 }, { "epoch": 4.0, "grad_norm": 86.87837219238281, "learning_rate": 8.693333333333334e-06, "loss": 3.8292, "step": 400 }, { "epoch": 4.0, "eval_loss": 3.553706169128418, "eval_map": 0.0012, "eval_map_50": 0.0028, "eval_map_75": 0.0009, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, 
"eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0001, "eval_map_medium": 0.0018, "eval_map_neckline": 0.0, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.047, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0008, "eval_map_small": 0.0018, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0024, "eval_mar_10": 0.0083, "eval_mar_100": 0.0109, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.3754, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.0713, "eval_mar_100_sock": 0.0, 
"eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0033, "eval_mar_medium": 0.0175, "eval_mar_small": 0.0118, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.9084, "eval_samples_per_second": 5.289, "eval_steps_per_second": 1.322, "step": 400 }, { "epoch": 4.01, "grad_norm": 191.41966247558594, "learning_rate": 8.690000000000002e-06, "loss": 3.3743, "step": 401 }, { "epoch": 4.02, "grad_norm": 80.21698760986328, "learning_rate": 8.686666666666668e-06, "loss": 3.5757, "step": 402 }, { "epoch": 4.03, "grad_norm": 223.34112548828125, "learning_rate": 8.683333333333334e-06, "loss": 3.3792, "step": 403 }, { "epoch": 4.04, "grad_norm": 83.85723876953125, "learning_rate": 8.68e-06, "loss": 4.2565, "step": 404 }, { "epoch": 4.05, "grad_norm": 142.95468139648438, "learning_rate": 8.676666666666668e-06, "loss": 3.2355, "step": 405 }, { "epoch": 4.06, "grad_norm": 338.5625305175781, "learning_rate": 8.673333333333334e-06, "loss": 3.4083, "step": 406 }, { "epoch": 4.07, "grad_norm": 46.8685302734375, "learning_rate": 8.67e-06, "loss": 3.3451, "step": 407 }, { "epoch": 4.08, "grad_norm": 43.39707565307617, "learning_rate": 8.666666666666668e-06, "loss": 3.6061, "step": 408 }, { "epoch": 4.09, "grad_norm": 1741.31201171875, "learning_rate": 8.663333333333334e-06, "loss": 3.526, "step": 409 }, { "epoch": 4.1, "grad_norm": 155.57106018066406, "learning_rate": 8.66e-06, "loss": 3.1351, "step": 410 }, { "epoch": 4.11, "grad_norm": 86.5200424194336, "learning_rate": 8.656666666666668e-06, "loss": 2.9969, "step": 411 }, { "epoch": 4.12, "grad_norm": 206.48385620117188, "learning_rate": 8.653333333333334e-06, "loss": 3.7043, "step": 412 }, { "epoch": 4.13, "grad_norm": 72.7640380859375, "learning_rate": 8.65e-06, "loss": 3.2477, "step": 413 }, { "epoch": 4.14, "grad_norm": 
40.38823699951172, "learning_rate": 8.646666666666668e-06, "loss": 3.4048, "step": 414 }, { "epoch": 4.15, "grad_norm": 750.9967651367188, "learning_rate": 8.643333333333334e-06, "loss": 2.9791, "step": 415 }, { "epoch": 4.16, "grad_norm": 151.1963653564453, "learning_rate": 8.64e-06, "loss": 3.2267, "step": 416 }, { "epoch": 4.17, "grad_norm": 67.0040054321289, "learning_rate": 8.636666666666668e-06, "loss": 3.5658, "step": 417 }, { "epoch": 4.18, "grad_norm": 52.671142578125, "learning_rate": 8.633333333333334e-06, "loss": 3.3049, "step": 418 }, { "epoch": 4.19, "grad_norm": 75.62164306640625, "learning_rate": 8.63e-06, "loss": 3.465, "step": 419 }, { "epoch": 4.2, "grad_norm": 161.37997436523438, "learning_rate": 8.626666666666668e-06, "loss": 4.1625, "step": 420 }, { "epoch": 4.21, "grad_norm": 64.77811431884766, "learning_rate": 8.623333333333334e-06, "loss": 3.8797, "step": 421 }, { "epoch": 4.22, "grad_norm": 37.597076416015625, "learning_rate": 8.62e-06, "loss": 3.2792, "step": 422 }, { "epoch": 4.23, "grad_norm": 39.23066329956055, "learning_rate": 8.616666666666668e-06, "loss": 3.2366, "step": 423 }, { "epoch": 4.24, "grad_norm": 149.1590118408203, "learning_rate": 8.613333333333333e-06, "loss": 3.4089, "step": 424 }, { "epoch": 4.25, "grad_norm": 51.25263977050781, "learning_rate": 8.61e-06, "loss": 3.6749, "step": 425 }, { "epoch": 4.26, "grad_norm": 66.211669921875, "learning_rate": 8.606666666666668e-06, "loss": 3.401, "step": 426 }, { "epoch": 4.27, "grad_norm": 276.60394287109375, "learning_rate": 8.603333333333333e-06, "loss": 3.2749, "step": 427 }, { "epoch": 4.28, "grad_norm": 103.21831512451172, "learning_rate": 8.6e-06, "loss": 3.8845, "step": 428 }, { "epoch": 4.29, "grad_norm": 67.19161224365234, "learning_rate": 8.596666666666667e-06, "loss": 3.1743, "step": 429 }, { "epoch": 4.3, "grad_norm": 38.15357208251953, "learning_rate": 8.593333333333333e-06, "loss": 3.4316, "step": 430 }, { "epoch": 4.31, "grad_norm": 90.95541381835938, 
"learning_rate": 8.59e-06, "loss": 3.5366, "step": 431 }, { "epoch": 4.32, "grad_norm": 56.30793380737305, "learning_rate": 8.586666666666667e-06, "loss": 3.2313, "step": 432 }, { "epoch": 4.33, "grad_norm": 103.61373901367188, "learning_rate": 8.583333333333333e-06, "loss": 3.8566, "step": 433 }, { "epoch": 4.34, "grad_norm": 100.02061462402344, "learning_rate": 8.580000000000001e-06, "loss": 3.8664, "step": 434 }, { "epoch": 4.35, "grad_norm": 107.3927001953125, "learning_rate": 8.576666666666667e-06, "loss": 3.7702, "step": 435 }, { "epoch": 4.36, "grad_norm": 76.27767944335938, "learning_rate": 8.573333333333333e-06, "loss": 3.431, "step": 436 }, { "epoch": 4.37, "grad_norm": 54.499122619628906, "learning_rate": 8.570000000000001e-06, "loss": 3.7572, "step": 437 }, { "epoch": 4.38, "grad_norm": 56.328369140625, "learning_rate": 8.566666666666667e-06, "loss": 3.5273, "step": 438 }, { "epoch": 4.39, "grad_norm": 145.0022430419922, "learning_rate": 8.563333333333333e-06, "loss": 4.7891, "step": 439 }, { "epoch": 4.4, "grad_norm": 71.48756408691406, "learning_rate": 8.560000000000001e-06, "loss": 2.8543, "step": 440 }, { "epoch": 4.41, "grad_norm": 89.86676788330078, "learning_rate": 8.556666666666667e-06, "loss": 2.7713, "step": 441 }, { "epoch": 4.42, "grad_norm": 257.73883056640625, "learning_rate": 8.553333333333333e-06, "loss": 3.5217, "step": 442 }, { "epoch": 4.43, "grad_norm": 37.92715835571289, "learning_rate": 8.550000000000001e-06, "loss": 3.0815, "step": 443 }, { "epoch": 4.44, "grad_norm": 65.5947494506836, "learning_rate": 8.546666666666667e-06, "loss": 3.1571, "step": 444 }, { "epoch": 4.45, "grad_norm": 94.82684326171875, "learning_rate": 8.543333333333333e-06, "loss": 3.5917, "step": 445 }, { "epoch": 4.46, "grad_norm": 58.26247787475586, "learning_rate": 8.540000000000001e-06, "loss": 3.2993, "step": 446 }, { "epoch": 4.47, "grad_norm": 38.4762077331543, "learning_rate": 8.536666666666667e-06, "loss": 4.2958, "step": 447 }, { "epoch": 4.48, 
"grad_norm": 268.56451416015625, "learning_rate": 8.533333333333335e-06, "loss": 3.8243, "step": 448 }, { "epoch": 4.49, "grad_norm": 615.0467529296875, "learning_rate": 8.530000000000001e-06, "loss": 4.1324, "step": 449 }, { "epoch": 4.5, "grad_norm": 56.793174743652344, "learning_rate": 8.526666666666667e-06, "loss": 4.3096, "step": 450 }, { "epoch": 4.5, "eval_loss": 3.456611394882202, "eval_map": 0.0012, "eval_map_50": 0.0033, "eval_map_75": 0.0006, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0002, "eval_map_medium": 0.0018, "eval_map_neckline": 0.0, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.0488, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0017, "eval_map_small": 0.0027, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0027, "eval_mar_10": 0.0085, "eval_mar_100": 0.0111, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, 
"eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.3209, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.133, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.007, "eval_mar_medium": 0.0179, "eval_mar_small": 0.0098, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.7737, "eval_samples_per_second": 5.327, "eval_steps_per_second": 1.332, "step": 450 }, { "epoch": 4.51, "grad_norm": 89.68521118164062, "learning_rate": 8.523333333333335e-06, "loss": 6.2949, "step": 451 }, { "epoch": 4.52, "grad_norm": 54.010475158691406, "learning_rate": 8.52e-06, "loss": 3.4449, "step": 452 }, { "epoch": 4.53, "grad_norm": 79.97595977783203, "learning_rate": 8.516666666666668e-06, "loss": 3.9551, "step": 453 }, { "epoch": 4.54, "grad_norm": 82.09306335449219, "learning_rate": 8.513333333333335e-06, "loss": 3.6865, "step": 454 }, { "epoch": 4.55, "grad_norm": 101.73391723632812, "learning_rate": 8.51e-06, "loss": 3.8547, "step": 455 }, { "epoch": 4.5600000000000005, "grad_norm": 67.6008071899414, "learning_rate": 8.506666666666668e-06, "loss": 3.3084, "step": 456 }, { "epoch": 4.57, "grad_norm": 117.53973388671875, "learning_rate": 8.503333333333334e-06, "loss": 2.9271, "step": 457 }, { "epoch": 4.58, "grad_norm": 74.84324645996094, "learning_rate": 8.5e-06, 
"loss": 2.9897, "step": 458 }, { "epoch": 4.59, "grad_norm": 79.19734954833984, "learning_rate": 8.496666666666668e-06, "loss": 4.3276, "step": 459 }, { "epoch": 4.6, "grad_norm": 51.58616638183594, "learning_rate": 8.493333333333334e-06, "loss": 3.7354, "step": 460 }, { "epoch": 4.61, "grad_norm": 113.05933380126953, "learning_rate": 8.49e-06, "loss": 3.5882, "step": 461 }, { "epoch": 4.62, "grad_norm": 75.93956756591797, "learning_rate": 8.486666666666668e-06, "loss": 2.8279, "step": 462 }, { "epoch": 4.63, "grad_norm": 119.72422790527344, "learning_rate": 8.483333333333334e-06, "loss": 3.2628, "step": 463 }, { "epoch": 4.64, "grad_norm": 57.24863815307617, "learning_rate": 8.48e-06, "loss": 3.1254, "step": 464 }, { "epoch": 4.65, "grad_norm": 54.77051544189453, "learning_rate": 8.476666666666668e-06, "loss": 3.1911, "step": 465 }, { "epoch": 4.66, "grad_norm": 65.98894500732422, "learning_rate": 8.473333333333334e-06, "loss": 3.2468, "step": 466 }, { "epoch": 4.67, "grad_norm": 47.837833404541016, "learning_rate": 8.47e-06, "loss": 3.3644, "step": 467 }, { "epoch": 4.68, "grad_norm": 66.17781066894531, "learning_rate": 8.466666666666668e-06, "loss": 3.1178, "step": 468 }, { "epoch": 4.6899999999999995, "grad_norm": 58.05970001220703, "learning_rate": 8.463333333333334e-06, "loss": 3.7289, "step": 469 }, { "epoch": 4.7, "grad_norm": 80.36103820800781, "learning_rate": 8.46e-06, "loss": 3.6393, "step": 470 }, { "epoch": 4.71, "grad_norm": 101.72262573242188, "learning_rate": 8.456666666666668e-06, "loss": 3.4898, "step": 471 }, { "epoch": 4.72, "grad_norm": 172.2316131591797, "learning_rate": 8.453333333333334e-06, "loss": 3.4455, "step": 472 }, { "epoch": 4.73, "grad_norm": 84.90666198730469, "learning_rate": 8.45e-06, "loss": 4.4816, "step": 473 }, { "epoch": 4.74, "grad_norm": 75.94354248046875, "learning_rate": 8.446666666666668e-06, "loss": 3.1827, "step": 474 }, { "epoch": 4.75, "grad_norm": 44.19095230102539, "learning_rate": 8.443333333333334e-06, "loss": 
4.2897, "step": 475 }, { "epoch": 4.76, "grad_norm": 61.29315185546875, "learning_rate": 8.44e-06, "loss": 3.6666, "step": 476 }, { "epoch": 4.77, "grad_norm": 208.01097106933594, "learning_rate": 8.436666666666667e-06, "loss": 3.1515, "step": 477 }, { "epoch": 4.78, "grad_norm": 117.27462768554688, "learning_rate": 8.433333333333334e-06, "loss": 3.2377, "step": 478 }, { "epoch": 4.79, "grad_norm": 82.53550720214844, "learning_rate": 8.43e-06, "loss": 3.6185, "step": 479 }, { "epoch": 4.8, "grad_norm": 46.8138542175293, "learning_rate": 8.426666666666667e-06, "loss": 3.1463, "step": 480 }, { "epoch": 4.8100000000000005, "grad_norm": 1140.0433349609375, "learning_rate": 8.423333333333335e-06, "loss": 3.8914, "step": 481 }, { "epoch": 4.82, "grad_norm": 559.6304321289062, "learning_rate": 8.42e-06, "loss": 3.4993, "step": 482 }, { "epoch": 4.83, "grad_norm": 61.93208312988281, "learning_rate": 8.416666666666667e-06, "loss": 2.746, "step": 483 }, { "epoch": 4.84, "grad_norm": 108.8965835571289, "learning_rate": 8.413333333333335e-06, "loss": 3.1314, "step": 484 }, { "epoch": 4.85, "grad_norm": 340.7133483886719, "learning_rate": 8.41e-06, "loss": 3.5109, "step": 485 }, { "epoch": 4.86, "grad_norm": 184.39132690429688, "learning_rate": 8.406666666666667e-06, "loss": 2.8928, "step": 486 }, { "epoch": 4.87, "grad_norm": 178.1990966796875, "learning_rate": 8.403333333333335e-06, "loss": 3.1132, "step": 487 }, { "epoch": 4.88, "grad_norm": 52.84767532348633, "learning_rate": 8.400000000000001e-06, "loss": 3.2481, "step": 488 }, { "epoch": 4.89, "grad_norm": 53.04524612426758, "learning_rate": 8.396666666666667e-06, "loss": 3.7814, "step": 489 }, { "epoch": 4.9, "grad_norm": 88.39616394042969, "learning_rate": 8.393333333333335e-06, "loss": 4.587, "step": 490 }, { "epoch": 4.91, "grad_norm": 79.58772277832031, "learning_rate": 8.390000000000001e-06, "loss": 3.0199, "step": 491 }, { "epoch": 4.92, "grad_norm": 83.44498443603516, "learning_rate": 8.386666666666667e-06, 
"loss": 4.1199, "step": 492 }, { "epoch": 4.93, "grad_norm": 127.07572937011719, "learning_rate": 8.383333333333335e-06, "loss": 3.3127, "step": 493 }, { "epoch": 4.9399999999999995, "grad_norm": 58.65428924560547, "learning_rate": 8.380000000000001e-06, "loss": 3.5423, "step": 494 }, { "epoch": 4.95, "grad_norm": 75.19646453857422, "learning_rate": 8.376666666666667e-06, "loss": 3.2119, "step": 495 }, { "epoch": 4.96, "grad_norm": 25.58902931213379, "learning_rate": 8.373333333333335e-06, "loss": 6.5783, "step": 496 }, { "epoch": 4.97, "grad_norm": 69.8748779296875, "learning_rate": 8.370000000000001e-06, "loss": 3.257, "step": 497 }, { "epoch": 4.98, "grad_norm": 60.0976448059082, "learning_rate": 8.366666666666667e-06, "loss": 3.535, "step": 498 }, { "epoch": 4.99, "grad_norm": 48.2392578125, "learning_rate": 8.363333333333335e-06, "loss": 4.0285, "step": 499 }, { "epoch": 5.0, "grad_norm": 48.40083694458008, "learning_rate": 8.36e-06, "loss": 3.7201, "step": 500 }, { "epoch": 5.0, "eval_loss": 3.334157705307007, "eval_map": 0.0013, "eval_map_50": 0.0035, "eval_map_75": 0.0007, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0004, "eval_map_medium": 0.0021, "eval_map_neckline": 0.0, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.0528, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0023, 
"eval_map_small": 0.0019, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0024, "eval_mar_10": 0.0102, "eval_mar_100": 0.0141, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.4284, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.1513, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0107, "eval_mar_medium": 0.0231, "eval_mar_small": 0.0121, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.1049, "eval_samples_per_second": 5.523, "eval_steps_per_second": 1.381, "step": 500 }, { "epoch": 5.01, "grad_norm": 45.66493225097656, "learning_rate": 8.356666666666667e-06, "loss": 3.7128, "step": 501 }, { "epoch": 5.02, "grad_norm": 87.2018051147461, "learning_rate": 8.353333333333335e-06, "loss": 3.742, "step": 502 }, { "epoch": 5.03, 
"grad_norm": 209.86376953125, "learning_rate": 8.35e-06, "loss": 3.2284, "step": 503 }, { "epoch": 5.04, "grad_norm": 62.537601470947266, "learning_rate": 8.346666666666668e-06, "loss": 3.009, "step": 504 }, { "epoch": 5.05, "grad_norm": 62.01683044433594, "learning_rate": 8.343333333333334e-06, "loss": 2.9344, "step": 505 }, { "epoch": 5.06, "grad_norm": 112.09513854980469, "learning_rate": 8.34e-06, "loss": 2.9599, "step": 506 }, { "epoch": 5.07, "grad_norm": 94.53915405273438, "learning_rate": 8.336666666666668e-06, "loss": 3.094, "step": 507 }, { "epoch": 5.08, "grad_norm": 66.77739715576172, "learning_rate": 8.333333333333334e-06, "loss": 3.1075, "step": 508 }, { "epoch": 5.09, "grad_norm": 312.80072021484375, "learning_rate": 8.33e-06, "loss": 3.7016, "step": 509 }, { "epoch": 5.1, "grad_norm": 43.05351257324219, "learning_rate": 8.326666666666668e-06, "loss": 3.2276, "step": 510 }, { "epoch": 5.11, "grad_norm": 138.8521270751953, "learning_rate": 8.323333333333334e-06, "loss": 3.4523, "step": 511 }, { "epoch": 5.12, "grad_norm": 82.82918548583984, "learning_rate": 8.32e-06, "loss": 3.3838, "step": 512 }, { "epoch": 5.13, "grad_norm": 190.12460327148438, "learning_rate": 8.316666666666668e-06, "loss": 3.3867, "step": 513 }, { "epoch": 5.14, "grad_norm": 88.63945770263672, "learning_rate": 8.313333333333334e-06, "loss": 3.5807, "step": 514 }, { "epoch": 5.15, "grad_norm": 76.17707824707031, "learning_rate": 8.31e-06, "loss": 3.0321, "step": 515 }, { "epoch": 5.16, "grad_norm": 55.54545593261719, "learning_rate": 8.306666666666668e-06, "loss": 3.8395, "step": 516 }, { "epoch": 5.17, "grad_norm": 114.88622283935547, "learning_rate": 8.303333333333334e-06, "loss": 3.3272, "step": 517 }, { "epoch": 5.18, "grad_norm": 192.8434295654297, "learning_rate": 8.3e-06, "loss": 2.8167, "step": 518 }, { "epoch": 5.19, "grad_norm": 40.361148834228516, "learning_rate": 8.296666666666668e-06, "loss": 5.929, "step": 519 }, { "epoch": 5.2, "grad_norm": 47.811500549316406, 
"learning_rate": 8.293333333333334e-06, "loss": 3.2289, "step": 520 }, { "epoch": 5.21, "grad_norm": 55.548545837402344, "learning_rate": 8.29e-06, "loss": 3.7026, "step": 521 }, { "epoch": 5.22, "grad_norm": 68.41854095458984, "learning_rate": 8.286666666666668e-06, "loss": 3.2228, "step": 522 }, { "epoch": 5.23, "grad_norm": 57.280330657958984, "learning_rate": 8.283333333333334e-06, "loss": 3.422, "step": 523 }, { "epoch": 5.24, "grad_norm": 52.29472732543945, "learning_rate": 8.28e-06, "loss": 3.5809, "step": 524 }, { "epoch": 5.25, "grad_norm": 83.06446838378906, "learning_rate": 8.276666666666666e-06, "loss": 3.7533, "step": 525 }, { "epoch": 5.26, "grad_norm": 196.70913696289062, "learning_rate": 8.273333333333334e-06, "loss": 3.3223, "step": 526 }, { "epoch": 5.27, "grad_norm": 158.2017059326172, "learning_rate": 8.27e-06, "loss": 2.9393, "step": 527 }, { "epoch": 5.28, "grad_norm": 67.90316009521484, "learning_rate": 8.266666666666667e-06, "loss": 2.3961, "step": 528 }, { "epoch": 5.29, "grad_norm": 77.37881469726562, "learning_rate": 8.263333333333334e-06, "loss": 4.4578, "step": 529 }, { "epoch": 5.3, "grad_norm": 137.28054809570312, "learning_rate": 8.26e-06, "loss": 3.4804, "step": 530 }, { "epoch": 5.31, "grad_norm": 46.0345573425293, "learning_rate": 8.256666666666667e-06, "loss": 2.8139, "step": 531 }, { "epoch": 5.32, "grad_norm": 62.68904113769531, "learning_rate": 8.253333333333334e-06, "loss": 3.6245, "step": 532 }, { "epoch": 5.33, "grad_norm": 39.411094665527344, "learning_rate": 8.25e-06, "loss": 3.7335, "step": 533 }, { "epoch": 5.34, "grad_norm": 69.06774139404297, "learning_rate": 8.246666666666667e-06, "loss": 3.4266, "step": 534 }, { "epoch": 5.35, "grad_norm": 71.19928741455078, "learning_rate": 8.243333333333335e-06, "loss": 3.8317, "step": 535 }, { "epoch": 5.36, "grad_norm": 42.991111755371094, "learning_rate": 8.24e-06, "loss": 3.1485, "step": 536 }, { "epoch": 5.37, "grad_norm": 72.22513580322266, "learning_rate": 
8.236666666666667e-06, "loss": 3.5016, "step": 537 }, { "epoch": 5.38, "grad_norm": 41.5932502746582, "learning_rate": 8.233333333333335e-06, "loss": 3.7102, "step": 538 }, { "epoch": 5.39, "grad_norm": 101.79138946533203, "learning_rate": 8.23e-06, "loss": 3.4658, "step": 539 }, { "epoch": 5.4, "grad_norm": 52.59572219848633, "learning_rate": 8.226666666666667e-06, "loss": 3.4883, "step": 540 }, { "epoch": 5.41, "grad_norm": 220.48849487304688, "learning_rate": 8.223333333333335e-06, "loss": 3.6716, "step": 541 }, { "epoch": 5.42, "grad_norm": 128.90115356445312, "learning_rate": 8.220000000000001e-06, "loss": 3.2628, "step": 542 }, { "epoch": 5.43, "grad_norm": 42.49427032470703, "learning_rate": 8.216666666666667e-06, "loss": 2.8595, "step": 543 }, { "epoch": 5.44, "grad_norm": 183.98255920410156, "learning_rate": 8.213333333333335e-06, "loss": 3.28, "step": 544 }, { "epoch": 5.45, "grad_norm": 52.12464904785156, "learning_rate": 8.210000000000001e-06, "loss": 3.1106, "step": 545 }, { "epoch": 5.46, "grad_norm": 301.0237731933594, "learning_rate": 8.206666666666667e-06, "loss": 2.6968, "step": 546 }, { "epoch": 5.47, "grad_norm": 54.9080924987793, "learning_rate": 8.203333333333335e-06, "loss": 3.0984, "step": 547 }, { "epoch": 5.48, "grad_norm": 44.20506286621094, "learning_rate": 8.2e-06, "loss": 3.5284, "step": 548 }, { "epoch": 5.49, "grad_norm": 372.4070129394531, "learning_rate": 8.196666666666667e-06, "loss": 3.1784, "step": 549 }, { "epoch": 5.5, "grad_norm": 48.08243942260742, "learning_rate": 8.193333333333335e-06, "loss": 3.8089, "step": 550 }, { "epoch": 5.5, "eval_loss": 3.2386651039123535, "eval_map": 0.0019, "eval_map_50": 0.0047, "eval_map_75": 0.0014, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, 
"eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0005, "eval_map_medium": 0.0032, "eval_map_neckline": 0.0, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.0744, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0041, "eval_map_small": 0.0023, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0029, "eval_mar_10": 0.0119, "eval_mar_100": 0.0159, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.4269, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.2252, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 
0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0149, "eval_mar_medium": 0.0255, "eval_mar_small": 0.0124, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.9718, "eval_samples_per_second": 5.271, "eval_steps_per_second": 1.318, "step": 550 }, { "epoch": 5.51, "grad_norm": 95.09712982177734, "learning_rate": 8.19e-06, "loss": 3.535, "step": 551 }, { "epoch": 5.52, "grad_norm": 94.72103118896484, "learning_rate": 8.186666666666667e-06, "loss": 3.6851, "step": 552 }, { "epoch": 5.53, "grad_norm": 48.643001556396484, "learning_rate": 8.183333333333333e-06, "loss": 3.8528, "step": 553 }, { "epoch": 5.54, "grad_norm": 64.88797760009766, "learning_rate": 8.18e-06, "loss": 2.6483, "step": 554 }, { "epoch": 5.55, "grad_norm": 37.93514633178711, "learning_rate": 8.176666666666667e-06, "loss": 3.7601, "step": 555 }, { "epoch": 5.5600000000000005, "grad_norm": 753.2659912109375, "learning_rate": 8.173333333333334e-06, "loss": 3.2129, "step": 556 }, { "epoch": 5.57, "grad_norm": 92.9797592163086, "learning_rate": 8.17e-06, "loss": 3.6882, "step": 557 }, { "epoch": 5.58, "grad_norm": 61.563331604003906, "learning_rate": 8.166666666666668e-06, "loss": 3.2781, "step": 558 }, { "epoch": 5.59, "grad_norm": 71.99932098388672, "learning_rate": 8.163333333333334e-06, "loss": 3.3295, "step": 559 }, { "epoch": 5.6, "grad_norm": 110.50749206542969, "learning_rate": 8.16e-06, "loss": 3.436, "step": 560 }, { "epoch": 5.61, "grad_norm": 54.22002410888672, "learning_rate": 8.156666666666668e-06, "loss": 3.075, "step": 561 }, { "epoch": 5.62, "grad_norm": 72.97435760498047, "learning_rate": 8.153333333333334e-06, "loss": 3.7861, "step": 562 }, { "epoch": 5.63, "grad_norm": 125.07279968261719, "learning_rate": 8.15e-06, "loss": 3.8902, "step": 563 }, { "epoch": 5.64, "grad_norm": 64.60013580322266, "learning_rate": 8.146666666666668e-06, "loss": 3.7597, "step": 564 }, { "epoch": 5.65, "grad_norm": 
111.85872650146484, "learning_rate": 8.143333333333334e-06, "loss": 3.3411, "step": 565 }, { "epoch": 5.66, "grad_norm": 38.826908111572266, "learning_rate": 8.14e-06, "loss": 4.1137, "step": 566 }, { "epoch": 5.67, "grad_norm": 51.419837951660156, "learning_rate": 8.136666666666668e-06, "loss": 3.645, "step": 567 }, { "epoch": 5.68, "grad_norm": 45.428585052490234, "learning_rate": 8.133333333333334e-06, "loss": 3.3798, "step": 568 }, { "epoch": 5.6899999999999995, "grad_norm": 67.59903717041016, "learning_rate": 8.13e-06, "loss": 3.106, "step": 569 }, { "epoch": 5.7, "grad_norm": 26.472288131713867, "learning_rate": 8.126666666666668e-06, "loss": 3.5541, "step": 570 }, { "epoch": 5.71, "grad_norm": 124.81189727783203, "learning_rate": 8.123333333333334e-06, "loss": 6.3333, "step": 571 }, { "epoch": 5.72, "grad_norm": 38.646484375, "learning_rate": 8.120000000000002e-06, "loss": 4.1439, "step": 572 }, { "epoch": 5.73, "grad_norm": 53.078025817871094, "learning_rate": 8.116666666666666e-06, "loss": 3.6862, "step": 573 }, { "epoch": 5.74, "grad_norm": 107.78236389160156, "learning_rate": 8.113333333333334e-06, "loss": 3.6882, "step": 574 }, { "epoch": 5.75, "grad_norm": 52.41328811645508, "learning_rate": 8.110000000000002e-06, "loss": 3.1888, "step": 575 }, { "epoch": 5.76, "grad_norm": 71.39189147949219, "learning_rate": 8.106666666666666e-06, "loss": 3.2355, "step": 576 }, { "epoch": 5.77, "grad_norm": 98.66508483886719, "learning_rate": 8.103333333333334e-06, "loss": 2.8063, "step": 577 }, { "epoch": 5.78, "grad_norm": 89.52662658691406, "learning_rate": 8.1e-06, "loss": 3.235, "step": 578 }, { "epoch": 5.79, "grad_norm": 99.77098846435547, "learning_rate": 8.096666666666667e-06, "loss": 3.4411, "step": 579 }, { "epoch": 5.8, "grad_norm": 46.946876525878906, "learning_rate": 8.093333333333334e-06, "loss": 2.931, "step": 580 }, { "epoch": 5.8100000000000005, "grad_norm": 4172.5546875, "learning_rate": 8.09e-06, "loss": 4.1566, "step": 581 }, { "epoch": 5.82, 
"grad_norm": 420.5699157714844, "learning_rate": 8.086666666666667e-06, "loss": 3.084, "step": 582 }, { "epoch": 5.83, "grad_norm": 140.7683868408203, "learning_rate": 8.083333333333334e-06, "loss": 2.9204, "step": 583 }, { "epoch": 5.84, "grad_norm": 228.62660217285156, "learning_rate": 8.08e-06, "loss": 3.8211, "step": 584 }, { "epoch": 5.85, "grad_norm": 64.96949005126953, "learning_rate": 8.076666666666667e-06, "loss": 3.3653, "step": 585 }, { "epoch": 5.86, "grad_norm": 82.66362762451172, "learning_rate": 8.073333333333335e-06, "loss": 4.0633, "step": 586 }, { "epoch": 5.87, "grad_norm": 178.9593963623047, "learning_rate": 8.07e-06, "loss": 3.1544, "step": 587 }, { "epoch": 5.88, "grad_norm": 65.32015991210938, "learning_rate": 8.066666666666667e-06, "loss": 4.0881, "step": 588 }, { "epoch": 5.89, "grad_norm": 42.70023727416992, "learning_rate": 8.063333333333335e-06, "loss": 3.8258, "step": 589 }, { "epoch": 5.9, "grad_norm": 100.78707885742188, "learning_rate": 8.06e-06, "loss": 3.1604, "step": 590 }, { "epoch": 5.91, "grad_norm": 101.1573715209961, "learning_rate": 8.056666666666667e-06, "loss": 3.2044, "step": 591 }, { "epoch": 5.92, "grad_norm": 45.67124557495117, "learning_rate": 8.053333333333335e-06, "loss": 3.0306, "step": 592 }, { "epoch": 5.93, "grad_norm": 2167.3115234375, "learning_rate": 8.050000000000001e-06, "loss": 3.1874, "step": 593 }, { "epoch": 5.9399999999999995, "grad_norm": 61.010562896728516, "learning_rate": 8.046666666666667e-06, "loss": 3.0003, "step": 594 }, { "epoch": 5.95, "grad_norm": 65.2033462524414, "learning_rate": 8.043333333333335e-06, "loss": 3.7872, "step": 595 }, { "epoch": 5.96, "grad_norm": 60.70212936401367, "learning_rate": 8.040000000000001e-06, "loss": 3.5074, "step": 596 }, { "epoch": 5.97, "grad_norm": 72.81919860839844, "learning_rate": 8.036666666666667e-06, "loss": 3.0005, "step": 597 }, { "epoch": 5.98, "grad_norm": 78.28927612304688, "learning_rate": 8.033333333333335e-06, "loss": 3.0071, "step": 598 }, { 
"epoch": 5.99, "grad_norm": 84.05313873291016, "learning_rate": 8.030000000000001e-06, "loss": 3.4797, "step": 599 }, { "epoch": 6.0, "grad_norm": 74.7823715209961, "learning_rate": 8.026666666666667e-06, "loss": 2.4317, "step": 600 }, { "epoch": 6.0, "eval_loss": 3.2247562408447266, "eval_map": 0.0017, "eval_map_50": 0.0043, "eval_map_75": 0.001, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0003, "eval_map_medium": 0.0027, "eval_map_neckline": 0.0, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.063, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0048, "eval_map_small": 0.0022, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0021, "eval_mar_10": 0.0121, "eval_mar_100": 0.0162, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head 
covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.4187, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.2461, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0149, "eval_mar_medium": 0.0265, "eval_mar_small": 0.0152, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.1316, "eval_samples_per_second": 5.515, "eval_steps_per_second": 1.379, "step": 600 }, { "epoch": 6.01, "grad_norm": 33.948368072509766, "learning_rate": 8.023333333333333e-06, "loss": 3.4715, "step": 601 }, { "epoch": 6.02, "grad_norm": 69.59430694580078, "learning_rate": 8.020000000000001e-06, "loss": 3.6046, "step": 602 }, { "epoch": 6.03, "grad_norm": 45.43846893310547, "learning_rate": 8.016666666666667e-06, "loss": 3.2898, "step": 603 }, { "epoch": 6.04, "grad_norm": 69.39102172851562, "learning_rate": 8.013333333333333e-06, "loss": 3.5658, "step": 604 }, { "epoch": 6.05, "grad_norm": 111.4694595336914, "learning_rate": 8.010000000000001e-06, "loss": 3.2673, "step": 605 }, { "epoch": 6.06, "grad_norm": 91.14551544189453, "learning_rate": 8.006666666666667e-06, "loss": 3.1956, "step": 606 }, { "epoch": 6.07, "grad_norm": 77.3194580078125, "learning_rate": 8.003333333333334e-06, "loss": 3.7651, "step": 607 }, { "epoch": 6.08, "grad_norm": 55.29030227661133, "learning_rate": 8.000000000000001e-06, "loss": 3.2736, "step": 608 }, { "epoch": 6.09, "grad_norm": 91.20716857910156, 
"learning_rate": 7.996666666666667e-06, "loss": 3.1518, "step": 609 }, { "epoch": 6.1, "grad_norm": 58.23624801635742, "learning_rate": 7.993333333333334e-06, "loss": 3.8073, "step": 610 }, { "epoch": 6.11, "grad_norm": 46.98551559448242, "learning_rate": 7.990000000000001e-06, "loss": 2.9796, "step": 611 }, { "epoch": 6.12, "grad_norm": 45.08614730834961, "learning_rate": 7.986666666666668e-06, "loss": 3.2594, "step": 612 }, { "epoch": 6.13, "grad_norm": 94.72848510742188, "learning_rate": 7.983333333333334e-06, "loss": 2.7317, "step": 613 }, { "epoch": 6.14, "grad_norm": 412.3808288574219, "learning_rate": 7.980000000000002e-06, "loss": 2.7231, "step": 614 }, { "epoch": 6.15, "grad_norm": 90.0866928100586, "learning_rate": 7.976666666666668e-06, "loss": 3.606, "step": 615 }, { "epoch": 6.16, "grad_norm": 525.89892578125, "learning_rate": 7.973333333333334e-06, "loss": 3.3569, "step": 616 }, { "epoch": 6.17, "grad_norm": 89.0801010131836, "learning_rate": 7.970000000000002e-06, "loss": 3.1669, "step": 617 }, { "epoch": 6.18, "grad_norm": 116.96369934082031, "learning_rate": 7.966666666666668e-06, "loss": 3.0284, "step": 618 }, { "epoch": 6.19, "grad_norm": 88.23650360107422, "learning_rate": 7.963333333333334e-06, "loss": 3.1122, "step": 619 }, { "epoch": 6.2, "grad_norm": 96.2500228881836, "learning_rate": 7.960000000000002e-06, "loss": 2.939, "step": 620 }, { "epoch": 6.21, "grad_norm": 54.88013458251953, "learning_rate": 7.956666666666666e-06, "loss": 2.9027, "step": 621 }, { "epoch": 6.22, "grad_norm": 65.7431869506836, "learning_rate": 7.953333333333334e-06, "loss": 4.0036, "step": 622 }, { "epoch": 6.23, "grad_norm": 92.74504852294922, "learning_rate": 7.950000000000002e-06, "loss": 2.7533, "step": 623 }, { "epoch": 6.24, "grad_norm": 80.3800277709961, "learning_rate": 7.946666666666666e-06, "loss": 2.7788, "step": 624 }, { "epoch": 6.25, "grad_norm": 71.12797546386719, "learning_rate": 7.943333333333334e-06, "loss": 3.542, "step": 625 }, { "epoch": 6.26, 
"grad_norm": 107.99994659423828, "learning_rate": 7.94e-06, "loss": 2.9872, "step": 626 }, { "epoch": 6.27, "grad_norm": 39.21348571777344, "learning_rate": 7.936666666666666e-06, "loss": 3.1236, "step": 627 }, { "epoch": 6.28, "grad_norm": 69.95499420166016, "learning_rate": 7.933333333333334e-06, "loss": 2.902, "step": 628 }, { "epoch": 6.29, "grad_norm": 65.42308044433594, "learning_rate": 7.93e-06, "loss": 3.6561, "step": 629 }, { "epoch": 6.3, "grad_norm": 57.2113151550293, "learning_rate": 7.926666666666666e-06, "loss": 2.9488, "step": 630 }, { "epoch": 6.31, "grad_norm": 52.39177703857422, "learning_rate": 7.923333333333334e-06, "loss": 3.3677, "step": 631 }, { "epoch": 6.32, "grad_norm": 108.10588836669922, "learning_rate": 7.92e-06, "loss": 3.3664, "step": 632 }, { "epoch": 6.33, "grad_norm": 46.88688659667969, "learning_rate": 7.916666666666667e-06, "loss": 6.2263, "step": 633 }, { "epoch": 6.34, "grad_norm": 200.06825256347656, "learning_rate": 7.913333333333334e-06, "loss": 4.2648, "step": 634 }, { "epoch": 6.35, "grad_norm": 58.04877471923828, "learning_rate": 7.91e-06, "loss": 3.7876, "step": 635 }, { "epoch": 6.36, "grad_norm": 33.3233757019043, "learning_rate": 7.906666666666667e-06, "loss": 2.9072, "step": 636 }, { "epoch": 6.37, "grad_norm": 60.75832748413086, "learning_rate": 7.903333333333334e-06, "loss": 3.6723, "step": 637 }, { "epoch": 6.38, "grad_norm": 90.74522399902344, "learning_rate": 7.9e-06, "loss": 3.7423, "step": 638 }, { "epoch": 6.39, "grad_norm": 78.61551666259766, "learning_rate": 7.896666666666667e-06, "loss": 3.3281, "step": 639 }, { "epoch": 6.4, "grad_norm": 61.36286926269531, "learning_rate": 7.893333333333335e-06, "loss": 3.5586, "step": 640 }, { "epoch": 6.41, "grad_norm": 50.790733337402344, "learning_rate": 7.89e-06, "loss": 3.5626, "step": 641 }, { "epoch": 6.42, "grad_norm": 37.15103530883789, "learning_rate": 7.886666666666667e-06, "loss": 3.2037, "step": 642 }, { "epoch": 6.43, "grad_norm": 166.60150146484375, 
"learning_rate": 7.883333333333335e-06, "loss": 3.0767, "step": 643 }, { "epoch": 6.44, "grad_norm": 94.69403839111328, "learning_rate": 7.88e-06, "loss": 3.8918, "step": 644 }, { "epoch": 6.45, "grad_norm": 61.20313262939453, "learning_rate": 7.876666666666667e-06, "loss": 2.9933, "step": 645 }, { "epoch": 6.46, "grad_norm": 180.88816833496094, "learning_rate": 7.873333333333335e-06, "loss": 3.4105, "step": 646 }, { "epoch": 6.47, "grad_norm": 44.62990951538086, "learning_rate": 7.870000000000001e-06, "loss": 2.922, "step": 647 }, { "epoch": 6.48, "grad_norm": 34.260738372802734, "learning_rate": 7.866666666666667e-06, "loss": 3.4827, "step": 648 }, { "epoch": 6.49, "grad_norm": 53.77005386352539, "learning_rate": 7.863333333333333e-06, "loss": 3.1901, "step": 649 }, { "epoch": 6.5, "grad_norm": 85.77108001708984, "learning_rate": 7.860000000000001e-06, "loss": 3.5663, "step": 650 }, { "epoch": 6.5, "eval_loss": 3.210782527923584, "eval_map": 0.002, "eval_map_50": 0.0052, "eval_map_75": 0.001, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0003, "eval_map_medium": 0.0037, "eval_map_neckline": 0.0, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.0756, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0053, "eval_map_small": 0.0021, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, 
"eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0027, "eval_mar_10": 0.0117, "eval_mar_100": 0.0169, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.4112, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.2826, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0165, "eval_mar_medium": 0.0278, "eval_mar_small": 0.0143, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.6269, "eval_samples_per_second": 5.369, "eval_steps_per_second": 1.342, "step": 650 }, { "epoch": 6.51, "grad_norm": 36.93003845214844, "learning_rate": 7.856666666666667e-06, "loss": 3.3811, "step": 651 }, { "epoch": 6.52, "grad_norm": 134.51515197753906, "learning_rate": 7.853333333333333e-06, "loss": 2.7113, "step": 652 }, { "epoch": 6.53, "grad_norm": 50.63750457763672, "learning_rate": 7.850000000000001e-06, "loss": 3.7402, "step": 653 
}, { "epoch": 6.54, "grad_norm": 65.010986328125, "learning_rate": 7.846666666666667e-06, "loss": 3.1408, "step": 654 }, { "epoch": 6.55, "grad_norm": 105.43413543701172, "learning_rate": 7.843333333333333e-06, "loss": 3.165, "step": 655 }, { "epoch": 6.5600000000000005, "grad_norm": 42.56019973754883, "learning_rate": 7.840000000000001e-06, "loss": 4.8796, "step": 656 }, { "epoch": 6.57, "grad_norm": 64.92215728759766, "learning_rate": 7.836666666666667e-06, "loss": 3.5482, "step": 657 }, { "epoch": 6.58, "grad_norm": 63.88715744018555, "learning_rate": 7.833333333333333e-06, "loss": 3.6981, "step": 658 }, { "epoch": 6.59, "grad_norm": 41.13237762451172, "learning_rate": 7.830000000000001e-06, "loss": 3.5499, "step": 659 }, { "epoch": 6.6, "grad_norm": 63.639549255371094, "learning_rate": 7.826666666666667e-06, "loss": 2.9369, "step": 660 }, { "epoch": 6.61, "grad_norm": 150.4837188720703, "learning_rate": 7.823333333333334e-06, "loss": 3.0257, "step": 661 }, { "epoch": 6.62, "grad_norm": 53.30604553222656, "learning_rate": 7.820000000000001e-06, "loss": 3.1633, "step": 662 }, { "epoch": 6.63, "grad_norm": 49.67282485961914, "learning_rate": 7.816666666666667e-06, "loss": 3.0224, "step": 663 }, { "epoch": 6.64, "grad_norm": 41.25212860107422, "learning_rate": 7.813333333333334e-06, "loss": 3.5288, "step": 664 }, { "epoch": 6.65, "grad_norm": 68.63485717773438, "learning_rate": 7.810000000000001e-06, "loss": 3.3443, "step": 665 }, { "epoch": 6.66, "grad_norm": 56.197078704833984, "learning_rate": 7.806666666666668e-06, "loss": 3.396, "step": 666 }, { "epoch": 6.67, "grad_norm": 53.00736999511719, "learning_rate": 7.803333333333334e-06, "loss": 3.6919, "step": 667 }, { "epoch": 6.68, "grad_norm": 33.044212341308594, "learning_rate": 7.800000000000002e-06, "loss": 6.123, "step": 668 }, { "epoch": 6.6899999999999995, "grad_norm": 29.428056716918945, "learning_rate": 7.796666666666666e-06, "loss": 3.4379, "step": 669 }, { "epoch": 6.7, "grad_norm": 42.47174072265625, 
"learning_rate": 7.793333333333334e-06, "loss": 3.7745, "step": 670 }, { "epoch": 6.71, "grad_norm": 161.56674194335938, "learning_rate": 7.790000000000002e-06, "loss": 3.5757, "step": 671 }, { "epoch": 6.72, "grad_norm": 136.4636688232422, "learning_rate": 7.786666666666666e-06, "loss": 3.1466, "step": 672 }, { "epoch": 6.73, "grad_norm": 50.513118743896484, "learning_rate": 7.783333333333334e-06, "loss": 2.998, "step": 673 }, { "epoch": 6.74, "grad_norm": 66.1438980102539, "learning_rate": 7.78e-06, "loss": 3.2449, "step": 674 }, { "epoch": 6.75, "grad_norm": 75.55859375, "learning_rate": 7.776666666666666e-06, "loss": 3.0595, "step": 675 }, { "epoch": 6.76, "grad_norm": 62.35960388183594, "learning_rate": 7.773333333333334e-06, "loss": 3.3617, "step": 676 }, { "epoch": 6.77, "grad_norm": 46.259071350097656, "learning_rate": 7.77e-06, "loss": 3.4944, "step": 677 }, { "epoch": 6.78, "grad_norm": 41.350242614746094, "learning_rate": 7.766666666666666e-06, "loss": 4.0088, "step": 678 }, { "epoch": 6.79, "grad_norm": 38.844051361083984, "learning_rate": 7.763333333333334e-06, "loss": 3.8338, "step": 679 }, { "epoch": 6.8, "grad_norm": 46.5444450378418, "learning_rate": 7.76e-06, "loss": 2.9755, "step": 680 }, { "epoch": 6.8100000000000005, "grad_norm": 37.27317810058594, "learning_rate": 7.756666666666666e-06, "loss": 3.8, "step": 681 }, { "epoch": 6.82, "grad_norm": 59.70240783691406, "learning_rate": 7.753333333333334e-06, "loss": 3.3883, "step": 682 }, { "epoch": 6.83, "grad_norm": 43.204627990722656, "learning_rate": 7.75e-06, "loss": 3.4792, "step": 683 }, { "epoch": 6.84, "grad_norm": 66.52877807617188, "learning_rate": 7.746666666666666e-06, "loss": 2.5493, "step": 684 }, { "epoch": 6.85, "grad_norm": 90.19356536865234, "learning_rate": 7.743333333333334e-06, "loss": 3.8276, "step": 685 }, { "epoch": 6.86, "grad_norm": 64.1360855102539, "learning_rate": 7.74e-06, "loss": 2.9513, "step": 686 }, { "epoch": 6.87, "grad_norm": 110.9431381225586, "learning_rate": 
7.736666666666667e-06, "loss": 3.0811, "step": 687 }, { "epoch": 6.88, "grad_norm": 88.25829315185547, "learning_rate": 7.733333333333334e-06, "loss": 3.1088, "step": 688 }, { "epoch": 6.89, "grad_norm": 201.80255126953125, "learning_rate": 7.73e-06, "loss": 4.1542, "step": 689 }, { "epoch": 6.9, "grad_norm": 81.24890899658203, "learning_rate": 7.726666666666667e-06, "loss": 3.3346, "step": 690 }, { "epoch": 6.91, "grad_norm": 115.70895385742188, "learning_rate": 7.723333333333334e-06, "loss": 2.5067, "step": 691 }, { "epoch": 6.92, "grad_norm": 61.103668212890625, "learning_rate": 7.72e-06, "loss": 3.0492, "step": 692 }, { "epoch": 6.93, "grad_norm": 102.78062438964844, "learning_rate": 7.716666666666667e-06, "loss": 3.076, "step": 693 }, { "epoch": 6.9399999999999995, "grad_norm": 53.36980438232422, "learning_rate": 7.713333333333335e-06, "loss": 3.7017, "step": 694 }, { "epoch": 6.95, "grad_norm": 57.49666213989258, "learning_rate": 7.71e-06, "loss": 3.189, "step": 695 }, { "epoch": 6.96, "grad_norm": 90.98611450195312, "learning_rate": 7.706666666666669e-06, "loss": 2.5549, "step": 696 }, { "epoch": 6.97, "grad_norm": 69.30762481689453, "learning_rate": 7.703333333333333e-06, "loss": 2.7083, "step": 697 }, { "epoch": 6.98, "grad_norm": 48.18233871459961, "learning_rate": 7.7e-06, "loss": 2.8354, "step": 698 }, { "epoch": 6.99, "grad_norm": 51.55180740356445, "learning_rate": 7.696666666666669e-06, "loss": 3.3475, "step": 699 }, { "epoch": 7.0, "grad_norm": 62.92783737182617, "learning_rate": 7.693333333333333e-06, "loss": 3.3771, "step": 700 }, { "epoch": 7.0, "eval_loss": 3.178760290145874, "eval_map": 0.0024, "eval_map_50": 0.0061, "eval_map_75": 0.0013, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 
0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0004, "eval_map_medium": 0.0048, "eval_map_neckline": 0.0, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.0937, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0051, "eval_map_small": 0.0025, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0037, "eval_mar_10": 0.0117, "eval_mar_100": 0.017, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.4239, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.2713, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, 
"eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0128, "eval_mar_medium": 0.0282, "eval_mar_small": 0.0134, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.7794, "eval_samples_per_second": 5.325, "eval_steps_per_second": 1.331, "step": 700 }, { "epoch": 7.01, "grad_norm": 74.47683715820312, "learning_rate": 7.690000000000001e-06, "loss": 2.8668, "step": 701 }, { "epoch": 7.02, "grad_norm": 128.14476013183594, "learning_rate": 7.686666666666667e-06, "loss": 2.8607, "step": 702 }, { "epoch": 7.03, "grad_norm": 794.815673828125, "learning_rate": 7.683333333333333e-06, "loss": 2.7309, "step": 703 }, { "epoch": 7.04, "grad_norm": 67.99777221679688, "learning_rate": 7.680000000000001e-06, "loss": 2.9492, "step": 704 }, { "epoch": 7.05, "grad_norm": 116.9759750366211, "learning_rate": 7.676666666666667e-06, "loss": 2.8777, "step": 705 }, { "epoch": 7.06, "grad_norm": 121.5567626953125, "learning_rate": 7.673333333333333e-06, "loss": 2.9753, "step": 706 }, { "epoch": 7.07, "grad_norm": 46.800960540771484, "learning_rate": 7.670000000000001e-06, "loss": 2.9117, "step": 707 }, { "epoch": 7.08, "grad_norm": 283.3844909667969, "learning_rate": 7.666666666666667e-06, "loss": 3.7733, "step": 708 }, { "epoch": 7.09, "grad_norm": 43.86688995361328, "learning_rate": 7.663333333333333e-06, "loss": 3.043, "step": 709 }, { "epoch": 7.1, "grad_norm": 165.683349609375, "learning_rate": 7.660000000000001e-06, "loss": 3.3653, "step": 710 }, { "epoch": 7.11, "grad_norm": 53.12596130371094, "learning_rate": 7.656666666666667e-06, "loss": 3.3669, "step": 711 }, { "epoch": 7.12, "grad_norm": 39.79900360107422, "learning_rate": 7.653333333333333e-06, "loss": 3.0311, "step": 712 }, { "epoch": 7.13, "grad_norm": 62.52997589111328, "learning_rate": 7.650000000000001e-06, "loss": 3.3257, "step": 713 }, { "epoch": 7.14, "grad_norm": 49.291526794433594, "learning_rate": 7.646666666666667e-06, "loss": 3.3564, 
"step": 714 }, { "epoch": 7.15, "grad_norm": 74.35525512695312, "learning_rate": 7.643333333333334e-06, "loss": 3.0424, "step": 715 }, { "epoch": 7.16, "grad_norm": 53.265872955322266, "learning_rate": 7.640000000000001e-06, "loss": 3.9874, "step": 716 }, { "epoch": 7.17, "grad_norm": 42.87297439575195, "learning_rate": 7.636666666666668e-06, "loss": 3.9573, "step": 717 }, { "epoch": 7.18, "grad_norm": 58.33640670776367, "learning_rate": 7.633333333333334e-06, "loss": 3.0609, "step": 718 }, { "epoch": 7.19, "grad_norm": 70.85275268554688, "learning_rate": 7.630000000000001e-06, "loss": 3.0062, "step": 719 }, { "epoch": 7.2, "grad_norm": 89.90322875976562, "learning_rate": 7.626666666666668e-06, "loss": 2.7442, "step": 720 }, { "epoch": 7.21, "grad_norm": 99.09687042236328, "learning_rate": 7.623333333333334e-06, "loss": 3.3946, "step": 721 }, { "epoch": 7.22, "grad_norm": 140.6915283203125, "learning_rate": 7.620000000000001e-06, "loss": 2.9282, "step": 722 }, { "epoch": 7.23, "grad_norm": 145.74598693847656, "learning_rate": 7.616666666666668e-06, "loss": 3.2442, "step": 723 }, { "epoch": 7.24, "grad_norm": 48.16111373901367, "learning_rate": 7.613333333333334e-06, "loss": 2.9072, "step": 724 }, { "epoch": 7.25, "grad_norm": 76.95262908935547, "learning_rate": 7.610000000000001e-06, "loss": 3.28, "step": 725 }, { "epoch": 7.26, "grad_norm": 65.84434509277344, "learning_rate": 7.606666666666668e-06, "loss": 2.7643, "step": 726 }, { "epoch": 7.27, "grad_norm": 76.0763168334961, "learning_rate": 7.603333333333334e-06, "loss": 3.3893, "step": 727 }, { "epoch": 7.28, "grad_norm": 93.06019592285156, "learning_rate": 7.600000000000001e-06, "loss": 3.1101, "step": 728 }, { "epoch": 7.29, "grad_norm": 88.32430267333984, "learning_rate": 7.596666666666668e-06, "loss": 3.6187, "step": 729 }, { "epoch": 7.3, "grad_norm": 51.10112380981445, "learning_rate": 7.593333333333334e-06, "loss": 3.2048, "step": 730 }, { "epoch": 7.31, "grad_norm": 39.45214080810547, "learning_rate": 
7.590000000000001e-06, "loss": 6.3501, "step": 731 }, { "epoch": 7.32, "grad_norm": 61.62187957763672, "learning_rate": 7.586666666666668e-06, "loss": 2.6363, "step": 732 }, { "epoch": 7.33, "grad_norm": 90.4185562133789, "learning_rate": 7.583333333333333e-06, "loss": 3.627, "step": 733 }, { "epoch": 7.34, "grad_norm": 93.19239044189453, "learning_rate": 7.58e-06, "loss": 2.6709, "step": 734 }, { "epoch": 7.35, "grad_norm": 209.31251525878906, "learning_rate": 7.576666666666668e-06, "loss": 3.7138, "step": 735 }, { "epoch": 7.36, "grad_norm": 218.2025909423828, "learning_rate": 7.573333333333333e-06, "loss": 3.1477, "step": 736 }, { "epoch": 7.37, "grad_norm": 52.789222717285156, "learning_rate": 7.57e-06, "loss": 3.4203, "step": 737 }, { "epoch": 7.38, "grad_norm": 56.985042572021484, "learning_rate": 7.566666666666667e-06, "loss": 3.4269, "step": 738 }, { "epoch": 7.39, "grad_norm": 50.17646789550781, "learning_rate": 7.5633333333333335e-06, "loss": 4.8948, "step": 739 }, { "epoch": 7.4, "grad_norm": 78.00829315185547, "learning_rate": 7.5600000000000005e-06, "loss": 2.9927, "step": 740 }, { "epoch": 7.41, "grad_norm": 117.74728393554688, "learning_rate": 7.5566666666666674e-06, "loss": 2.9843, "step": 741 }, { "epoch": 7.42, "grad_norm": 72.24607849121094, "learning_rate": 7.553333333333334e-06, "loss": 3.0913, "step": 742 }, { "epoch": 7.43, "grad_norm": 71.2081069946289, "learning_rate": 7.5500000000000006e-06, "loss": 2.8723, "step": 743 }, { "epoch": 7.44, "grad_norm": 53.00714111328125, "learning_rate": 7.5466666666666675e-06, "loss": 3.0477, "step": 744 }, { "epoch": 7.45, "grad_norm": 70.08740997314453, "learning_rate": 7.543333333333334e-06, "loss": 2.5779, "step": 745 }, { "epoch": 7.46, "grad_norm": 77.89901733398438, "learning_rate": 7.540000000000001e-06, "loss": 2.8254, "step": 746 }, { "epoch": 7.47, "grad_norm": 38.48298263549805, "learning_rate": 7.536666666666668e-06, "loss": 3.477, "step": 747 }, { "epoch": 7.48, "grad_norm": 
150.33303833007812, "learning_rate": 7.533333333333334e-06, "loss": 3.2538, "step": 748 }, { "epoch": 7.49, "grad_norm": 43.92172622680664, "learning_rate": 7.530000000000001e-06, "loss": 4.1258, "step": 749 }, { "epoch": 7.5, "grad_norm": 572.9395751953125, "learning_rate": 7.526666666666668e-06, "loss": 3.0818, "step": 750 }, { "epoch": 7.5, "eval_loss": 3.1502954959869385, "eval_map": 0.0022, "eval_map_50": 0.0051, "eval_map_75": 0.0016, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0002, "eval_map_medium": 0.005, "eval_map_neckline": 0.0, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.0838, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0071, "eval_map_small": 0.002, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0033, "eval_mar_10": 0.013, "eval_mar_100": 0.0183, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 
0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.4552, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.2957, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0153, "eval_mar_medium": 0.0293, "eval_mar_small": 0.0176, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.2299, "eval_samples_per_second": 5.485, "eval_steps_per_second": 1.371, "step": 750 }, { "epoch": 7.51, "grad_norm": 51.106597900390625, "learning_rate": 7.523333333333334e-06, "loss": 3.0251, "step": 751 }, { "epoch": 7.52, "grad_norm": 74.80470275878906, "learning_rate": 7.520000000000001e-06, "loss": 3.0297, "step": 752 }, { "epoch": 7.53, "grad_norm": 32.76933670043945, "learning_rate": 7.516666666666668e-06, "loss": 3.4333, "step": 753 }, { "epoch": 7.54, "grad_norm": 88.50602722167969, "learning_rate": 7.513333333333334e-06, "loss": 3.5545, "step": 754 }, { "epoch": 7.55, "grad_norm": 36.337276458740234, "learning_rate": 7.510000000000001e-06, "loss": 4.2831, "step": 755 }, { "epoch": 7.5600000000000005, "grad_norm": 59.12882995605469, "learning_rate": 7.506666666666668e-06, "loss": 3.246, "step": 756 }, { "epoch": 7.57, "grad_norm": 35.45384979248047, "learning_rate": 7.503333333333333e-06, "loss": 3.0951, "step": 757 }, { "epoch": 7.58, "grad_norm": 42.827369689941406, "learning_rate": 
7.500000000000001e-06, "loss": 3.6602, "step": 758 }, { "epoch": 7.59, "grad_norm": 54.90861892700195, "learning_rate": 7.496666666666668e-06, "loss": 2.9029, "step": 759 }, { "epoch": 7.6, "grad_norm": 70.6822738647461, "learning_rate": 7.493333333333333e-06, "loss": 3.6226, "step": 760 }, { "epoch": 7.61, "grad_norm": 50.77410125732422, "learning_rate": 7.49e-06, "loss": 4.0331, "step": 761 }, { "epoch": 7.62, "grad_norm": 356.15106201171875, "learning_rate": 7.486666666666667e-06, "loss": 3.1047, "step": 762 }, { "epoch": 7.63, "grad_norm": 111.87789916992188, "learning_rate": 7.483333333333333e-06, "loss": 2.752, "step": 763 }, { "epoch": 7.64, "grad_norm": 82.2522201538086, "learning_rate": 7.48e-06, "loss": 3.567, "step": 764 }, { "epoch": 7.65, "grad_norm": 70.77613830566406, "learning_rate": 7.476666666666667e-06, "loss": 3.084, "step": 765 }, { "epoch": 7.66, "grad_norm": 45.750972747802734, "learning_rate": 7.4733333333333335e-06, "loss": 3.3079, "step": 766 }, { "epoch": 7.67, "grad_norm": 86.27755737304688, "learning_rate": 7.4700000000000005e-06, "loss": 3.2543, "step": 767 }, { "epoch": 7.68, "grad_norm": 43.499244689941406, "learning_rate": 7.4666666666666675e-06, "loss": 3.0941, "step": 768 }, { "epoch": 7.6899999999999995, "grad_norm": 55.52049255371094, "learning_rate": 7.463333333333334e-06, "loss": 5.963, "step": 769 }, { "epoch": 7.7, "grad_norm": 64.39413452148438, "learning_rate": 7.4600000000000006e-06, "loss": 4.3139, "step": 770 }, { "epoch": 7.71, "grad_norm": 114.85185241699219, "learning_rate": 7.4566666666666676e-06, "loss": 2.8004, "step": 771 }, { "epoch": 7.72, "grad_norm": 47.704063415527344, "learning_rate": 7.453333333333334e-06, "loss": 3.3039, "step": 772 }, { "epoch": 7.73, "grad_norm": 336.514892578125, "learning_rate": 7.450000000000001e-06, "loss": 2.8433, "step": 773 }, { "epoch": 7.74, "grad_norm": 67.91984558105469, "learning_rate": 7.446666666666668e-06, "loss": 3.4646, "step": 774 }, { "epoch": 7.75, "grad_norm": 
37.458641052246094, "learning_rate": 7.443333333333334e-06, "loss": 3.832, "step": 775 }, { "epoch": 7.76, "grad_norm": 47.3620491027832, "learning_rate": 7.440000000000001e-06, "loss": 3.0565, "step": 776 }, { "epoch": 7.77, "grad_norm": 36.92617416381836, "learning_rate": 7.436666666666668e-06, "loss": 3.1253, "step": 777 }, { "epoch": 7.78, "grad_norm": 94.36244201660156, "learning_rate": 7.433333333333334e-06, "loss": 3.3664, "step": 778 }, { "epoch": 7.79, "grad_norm": 127.9669189453125, "learning_rate": 7.430000000000001e-06, "loss": 3.1657, "step": 779 }, { "epoch": 7.8, "grad_norm": 33.09944152832031, "learning_rate": 7.426666666666668e-06, "loss": 3.5799, "step": 780 }, { "epoch": 7.8100000000000005, "grad_norm": 50.19860076904297, "learning_rate": 7.423333333333333e-06, "loss": 2.6146, "step": 781 }, { "epoch": 7.82, "grad_norm": 40.96757125854492, "learning_rate": 7.420000000000001e-06, "loss": 2.8959, "step": 782 }, { "epoch": 7.83, "grad_norm": 33.10691833496094, "learning_rate": 7.416666666666668e-06, "loss": 2.9093, "step": 783 }, { "epoch": 7.84, "grad_norm": 32.21234130859375, "learning_rate": 7.413333333333333e-06, "loss": 3.9679, "step": 784 }, { "epoch": 7.85, "grad_norm": 200.1700897216797, "learning_rate": 7.41e-06, "loss": 3.5455, "step": 785 }, { "epoch": 7.86, "grad_norm": 68.7276382446289, "learning_rate": 7.406666666666667e-06, "loss": 2.9758, "step": 786 }, { "epoch": 7.87, "grad_norm": 47.1053466796875, "learning_rate": 7.403333333333333e-06, "loss": 3.6659, "step": 787 }, { "epoch": 7.88, "grad_norm": 43.193721771240234, "learning_rate": 7.4e-06, "loss": 3.5513, "step": 788 }, { "epoch": 7.89, "grad_norm": 126.39387512207031, "learning_rate": 7.396666666666667e-06, "loss": 2.8191, "step": 789 }, { "epoch": 7.9, "grad_norm": 58.705352783203125, "learning_rate": 7.393333333333333e-06, "loss": 3.0646, "step": 790 }, { "epoch": 7.91, "grad_norm": 105.86882019042969, "learning_rate": 7.39e-06, "loss": 2.4009, "step": 791 }, { "epoch": 7.92, 
"grad_norm": 42.31919860839844, "learning_rate": 7.386666666666667e-06, "loss": 3.0601, "step": 792 }, { "epoch": 7.93, "grad_norm": 41.90841293334961, "learning_rate": 7.3833333333333335e-06, "loss": 3.242, "step": 793 }, { "epoch": 7.9399999999999995, "grad_norm": 55.34571838378906, "learning_rate": 7.3800000000000005e-06, "loss": 3.5375, "step": 794 }, { "epoch": 7.95, "grad_norm": 194.9851531982422, "learning_rate": 7.3766666666666675e-06, "loss": 3.3462, "step": 795 }, { "epoch": 7.96, "grad_norm": 423.6452941894531, "learning_rate": 7.373333333333334e-06, "loss": 3.2036, "step": 796 }, { "epoch": 7.97, "grad_norm": 62.389400482177734, "learning_rate": 7.370000000000001e-06, "loss": 3.5607, "step": 797 }, { "epoch": 7.98, "grad_norm": 82.9281997680664, "learning_rate": 7.3666666666666676e-06, "loss": 3.7713, "step": 798 }, { "epoch": 7.99, "grad_norm": 44.885807037353516, "learning_rate": 7.363333333333334e-06, "loss": 3.0072, "step": 799 }, { "epoch": 8.0, "grad_norm": 58.460975646972656, "learning_rate": 7.360000000000001e-06, "loss": 3.246, "step": 800 }, { "epoch": 8.0, "eval_loss": 3.126610040664673, "eval_map": 0.0027, "eval_map_50": 0.0064, "eval_map_75": 0.0017, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0002, "eval_map_medium": 0.0055, "eval_map_neckline": 0.0, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 
0.1021, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0087, "eval_map_small": 0.0026, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0032, "eval_mar_10": 0.0132, "eval_mar_100": 0.0188, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.4448, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.3278, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0165, "eval_mar_medium": 0.0302, "eval_mar_small": 0.0169, "eval_model_preparation_time": 0.0124, "eval_runtime": 19.0169, "eval_samples_per_second": 5.258, "eval_steps_per_second": 1.315, "step": 800 }, { "epoch": 8.01, "grad_norm": 72.38507080078125, "learning_rate": 7.356666666666668e-06, "loss": 2.8748, "step": 801 }, { "epoch": 8.02, "grad_norm": 38.8370246887207, 
"learning_rate": 7.353333333333334e-06, "loss": 3.5536, "step": 802 }, { "epoch": 8.03, "grad_norm": 65.32669067382812, "learning_rate": 7.350000000000001e-06, "loss": 3.0536, "step": 803 }, { "epoch": 8.04, "grad_norm": 238.85113525390625, "learning_rate": 7.346666666666668e-06, "loss": 3.5565, "step": 804 }, { "epoch": 8.05, "grad_norm": 95.748291015625, "learning_rate": 7.343333333333333e-06, "loss": 2.3051, "step": 805 }, { "epoch": 8.06, "grad_norm": 43.916908264160156, "learning_rate": 7.340000000000001e-06, "loss": 4.2556, "step": 806 }, { "epoch": 8.07, "grad_norm": 48.42106628417969, "learning_rate": 7.336666666666668e-06, "loss": 2.9665, "step": 807 }, { "epoch": 8.08, "grad_norm": 84.63587951660156, "learning_rate": 7.333333333333333e-06, "loss": 3.1359, "step": 808 }, { "epoch": 8.09, "grad_norm": 77.0198745727539, "learning_rate": 7.33e-06, "loss": 3.0496, "step": 809 }, { "epoch": 8.1, "grad_norm": 46.36822509765625, "learning_rate": 7.326666666666667e-06, "loss": 3.2286, "step": 810 }, { "epoch": 8.11, "grad_norm": 57.2409553527832, "learning_rate": 7.323333333333333e-06, "loss": 3.1445, "step": 811 }, { "epoch": 8.12, "grad_norm": 56.040164947509766, "learning_rate": 7.32e-06, "loss": 2.5845, "step": 812 }, { "epoch": 8.13, "grad_norm": 55.69404220581055, "learning_rate": 7.316666666666667e-06, "loss": 3.5764, "step": 813 }, { "epoch": 8.14, "grad_norm": 84.94974517822266, "learning_rate": 7.313333333333333e-06, "loss": 3.2605, "step": 814 }, { "epoch": 8.15, "grad_norm": 78.10903930664062, "learning_rate": 7.31e-06, "loss": 3.1368, "step": 815 }, { "epoch": 8.16, "grad_norm": 66.232177734375, "learning_rate": 7.306666666666667e-06, "loss": 2.3061, "step": 816 }, { "epoch": 8.17, "grad_norm": 38.35574722290039, "learning_rate": 7.3033333333333334e-06, "loss": 2.9627, "step": 817 }, { "epoch": 8.18, "grad_norm": 80.08656311035156, "learning_rate": 7.3e-06, "loss": 2.912, "step": 818 }, { "epoch": 8.19, "grad_norm": 33.788936614990234, 
"learning_rate": 7.296666666666667e-06, "loss": 3.2501, "step": 819 }, { "epoch": 8.2, "grad_norm": 34.0781135559082, "learning_rate": 7.2933333333333335e-06, "loss": 3.0481, "step": 820 }, { "epoch": 8.21, "grad_norm": 115.54595184326172, "learning_rate": 7.2900000000000005e-06, "loss": 3.7608, "step": 821 }, { "epoch": 8.22, "grad_norm": 85.47960662841797, "learning_rate": 7.2866666666666675e-06, "loss": 2.8012, "step": 822 }, { "epoch": 8.23, "grad_norm": 69.00477600097656, "learning_rate": 7.2833333333333345e-06, "loss": 3.047, "step": 823 }, { "epoch": 8.24, "grad_norm": 72.28192901611328, "learning_rate": 7.280000000000001e-06, "loss": 2.4735, "step": 824 }, { "epoch": 8.25, "grad_norm": 39.43647384643555, "learning_rate": 7.276666666666668e-06, "loss": 3.0938, "step": 825 }, { "epoch": 8.26, "grad_norm": 41.71284484863281, "learning_rate": 7.2733333333333346e-06, "loss": 3.7789, "step": 826 }, { "epoch": 8.27, "grad_norm": 37.6790885925293, "learning_rate": 7.270000000000001e-06, "loss": 3.1523, "step": 827 }, { "epoch": 8.28, "grad_norm": 60.79823303222656, "learning_rate": 7.266666666666668e-06, "loss": 3.3979, "step": 828 }, { "epoch": 8.29, "grad_norm": 40.89284896850586, "learning_rate": 7.263333333333335e-06, "loss": 3.3733, "step": 829 }, { "epoch": 8.3, "grad_norm": 57.98153305053711, "learning_rate": 7.260000000000001e-06, "loss": 2.9705, "step": 830 }, { "epoch": 8.31, "grad_norm": 52.086204528808594, "learning_rate": 7.256666666666668e-06, "loss": 2.5855, "step": 831 }, { "epoch": 8.32, "grad_norm": 81.71173095703125, "learning_rate": 7.253333333333335e-06, "loss": 2.8256, "step": 832 }, { "epoch": 8.33, "grad_norm": 49.69612121582031, "learning_rate": 7.25e-06, "loss": 2.8827, "step": 833 }, { "epoch": 8.34, "grad_norm": 49.411739349365234, "learning_rate": 7.246666666666667e-06, "loss": 6.5968, "step": 834 }, { "epoch": 8.35, "grad_norm": 100.54804229736328, "learning_rate": 7.243333333333335e-06, "loss": 2.6735, "step": 835 }, { "epoch": 8.36, 
"grad_norm": 301.5572509765625, "learning_rate": 7.24e-06, "loss": 2.9844, "step": 836 }, { "epoch": 8.37, "grad_norm": 79.59809875488281, "learning_rate": 7.236666666666667e-06, "loss": 3.0464, "step": 837 }, { "epoch": 8.38, "grad_norm": 89.81795501708984, "learning_rate": 7.233333333333334e-06, "loss": 3.2796, "step": 838 }, { "epoch": 8.39, "grad_norm": 50.233619689941406, "learning_rate": 7.23e-06, "loss": 2.9938, "step": 839 }, { "epoch": 8.4, "grad_norm": 52.59807205200195, "learning_rate": 7.226666666666667e-06, "loss": 3.4888, "step": 840 }, { "epoch": 8.41, "grad_norm": 58.66334915161133, "learning_rate": 7.223333333333334e-06, "loss": 4.302, "step": 841 }, { "epoch": 8.42, "grad_norm": 29.6657772064209, "learning_rate": 7.22e-06, "loss": 6.3396, "step": 842 }, { "epoch": 8.43, "grad_norm": 44.512001037597656, "learning_rate": 7.216666666666667e-06, "loss": 3.3798, "step": 843 }, { "epoch": 8.44, "grad_norm": 118.5077133178711, "learning_rate": 7.213333333333334e-06, "loss": 3.1955, "step": 844 }, { "epoch": 8.45, "grad_norm": 38.47626495361328, "learning_rate": 7.2100000000000004e-06, "loss": 2.7519, "step": 845 }, { "epoch": 8.46, "grad_norm": 38.55072021484375, "learning_rate": 7.206666666666667e-06, "loss": 3.2531, "step": 846 }, { "epoch": 8.47, "grad_norm": 48.716243743896484, "learning_rate": 7.203333333333334e-06, "loss": 2.8709, "step": 847 }, { "epoch": 8.48, "grad_norm": 30.50242042541504, "learning_rate": 7.2000000000000005e-06, "loss": 3.6876, "step": 848 }, { "epoch": 8.49, "grad_norm": 52.15181350708008, "learning_rate": 7.1966666666666675e-06, "loss": 3.5691, "step": 849 }, { "epoch": 8.5, "grad_norm": 52.35666275024414, "learning_rate": 7.1933333333333345e-06, "loss": 3.1945, "step": 850 }, { "epoch": 8.5, "eval_loss": 3.096881151199341, "eval_map": 0.0029, "eval_map_50": 0.0067, "eval_map_75": 0.0019, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 
0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0002, "eval_map_medium": 0.0059, "eval_map_neckline": 0.0, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1093, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0114, "eval_map_small": 0.0029, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0036, "eval_mar_10": 0.015, "eval_mar_100": 0.0203, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.4709, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, 
"eval_mar_100_sleeve": 0.3617, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0182, "eval_mar_medium": 0.033, "eval_mar_small": 0.0176, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.4135, "eval_samples_per_second": 5.431, "eval_steps_per_second": 1.358, "step": 850 }, { "epoch": 8.51, "grad_norm": 43.25682830810547, "learning_rate": 7.190000000000001e-06, "loss": 2.9606, "step": 851 }, { "epoch": 8.52, "grad_norm": 41.469234466552734, "learning_rate": 7.186666666666668e-06, "loss": 3.2038, "step": 852 }, { "epoch": 8.53, "grad_norm": 68.10787963867188, "learning_rate": 7.183333333333335e-06, "loss": 3.1581, "step": 853 }, { "epoch": 8.54, "grad_norm": 101.062255859375, "learning_rate": 7.180000000000001e-06, "loss": 3.5805, "step": 854 }, { "epoch": 8.55, "grad_norm": 83.92488098144531, "learning_rate": 7.176666666666668e-06, "loss": 3.1565, "step": 855 }, { "epoch": 8.56, "grad_norm": 42.30232620239258, "learning_rate": 7.173333333333335e-06, "loss": 3.4445, "step": 856 }, { "epoch": 8.57, "grad_norm": 37.17006301879883, "learning_rate": 7.17e-06, "loss": 3.0732, "step": 857 }, { "epoch": 8.58, "grad_norm": 46.15174865722656, "learning_rate": 7.166666666666667e-06, "loss": 4.6128, "step": 858 }, { "epoch": 8.59, "grad_norm": 64.62358093261719, "learning_rate": 7.163333333333335e-06, "loss": 3.5723, "step": 859 }, { "epoch": 8.6, "grad_norm": 82.51624298095703, "learning_rate": 7.16e-06, "loss": 2.7343, "step": 860 }, { "epoch": 8.61, "grad_norm": 1430.31884765625, "learning_rate": 7.156666666666667e-06, "loss": 3.4424, "step": 861 }, { "epoch": 8.62, "grad_norm": 51.89326858520508, "learning_rate": 7.153333333333334e-06, "loss": 3.5876, "step": 862 }, { "epoch": 8.63, "grad_norm": 70.75638580322266, "learning_rate": 7.15e-06, "loss": 
3.0999, "step": 863 }, { "epoch": 8.64, "grad_norm": 64.23765563964844, "learning_rate": 7.146666666666667e-06, "loss": 3.3758, "step": 864 }, { "epoch": 8.65, "grad_norm": 36.40494155883789, "learning_rate": 7.143333333333334e-06, "loss": 3.2285, "step": 865 }, { "epoch": 8.66, "grad_norm": 86.4923324584961, "learning_rate": 7.14e-06, "loss": 3.6236, "step": 866 }, { "epoch": 8.67, "grad_norm": 42.76805877685547, "learning_rate": 7.136666666666667e-06, "loss": 2.825, "step": 867 }, { "epoch": 8.68, "grad_norm": 69.01063537597656, "learning_rate": 7.133333333333334e-06, "loss": 2.9551, "step": 868 }, { "epoch": 8.69, "grad_norm": 102.037353515625, "learning_rate": 7.13e-06, "loss": 2.7165, "step": 869 }, { "epoch": 8.7, "grad_norm": 70.99171447753906, "learning_rate": 7.126666666666667e-06, "loss": 3.8967, "step": 870 }, { "epoch": 8.71, "grad_norm": 79.67378234863281, "learning_rate": 7.123333333333334e-06, "loss": 3.0727, "step": 871 }, { "epoch": 8.72, "grad_norm": 28.88434410095215, "learning_rate": 7.1200000000000004e-06, "loss": 3.8211, "step": 872 }, { "epoch": 8.73, "grad_norm": 77.50942993164062, "learning_rate": 7.116666666666667e-06, "loss": 3.4059, "step": 873 }, { "epoch": 8.74, "grad_norm": 35.490169525146484, "learning_rate": 7.113333333333334e-06, "loss": 4.0975, "step": 874 }, { "epoch": 8.75, "grad_norm": 89.11698913574219, "learning_rate": 7.1100000000000005e-06, "loss": 2.9145, "step": 875 }, { "epoch": 8.76, "grad_norm": 68.19637298583984, "learning_rate": 7.1066666666666675e-06, "loss": 3.6586, "step": 876 }, { "epoch": 8.77, "grad_norm": 301.9892578125, "learning_rate": 7.1033333333333345e-06, "loss": 2.9785, "step": 877 }, { "epoch": 8.78, "grad_norm": 60.10928726196289, "learning_rate": 7.100000000000001e-06, "loss": 3.4997, "step": 878 }, { "epoch": 8.79, "grad_norm": 43.670509338378906, "learning_rate": 7.096666666666668e-06, "loss": 3.2788, "step": 879 }, { "epoch": 8.8, "grad_norm": 78.75917053222656, "learning_rate": 
7.093333333333335e-06, "loss": 3.5501, "step": 880 }, { "epoch": 8.81, "grad_norm": 58.42558288574219, "learning_rate": 7.09e-06, "loss": 3.3796, "step": 881 }, { "epoch": 8.82, "grad_norm": 105.85246276855469, "learning_rate": 7.086666666666667e-06, "loss": 3.3144, "step": 882 }, { "epoch": 8.83, "grad_norm": 83.42520141601562, "learning_rate": 7.083333333333335e-06, "loss": 3.9503, "step": 883 }, { "epoch": 8.84, "grad_norm": 69.04145050048828, "learning_rate": 7.08e-06, "loss": 3.2296, "step": 884 }, { "epoch": 8.85, "grad_norm": 53.14678955078125, "learning_rate": 7.076666666666667e-06, "loss": 3.3097, "step": 885 }, { "epoch": 8.86, "grad_norm": 58.73151397705078, "learning_rate": 7.073333333333334e-06, "loss": 3.4367, "step": 886 }, { "epoch": 8.87, "grad_norm": 45.56757736206055, "learning_rate": 7.07e-06, "loss": 2.8249, "step": 887 }, { "epoch": 8.88, "grad_norm": 87.50658416748047, "learning_rate": 7.066666666666667e-06, "loss": 2.9243, "step": 888 }, { "epoch": 8.89, "grad_norm": 47.36873245239258, "learning_rate": 7.063333333333334e-06, "loss": 3.3115, "step": 889 }, { "epoch": 8.9, "grad_norm": 53.95075607299805, "learning_rate": 7.06e-06, "loss": 3.8066, "step": 890 }, { "epoch": 8.91, "grad_norm": 197.09585571289062, "learning_rate": 7.056666666666667e-06, "loss": 2.3395, "step": 891 }, { "epoch": 8.92, "grad_norm": 78.1181640625, "learning_rate": 7.053333333333334e-06, "loss": 2.7352, "step": 892 }, { "epoch": 8.93, "grad_norm": 39.780433654785156, "learning_rate": 7.05e-06, "loss": 3.1087, "step": 893 }, { "epoch": 8.94, "grad_norm": 41.076515197753906, "learning_rate": 7.046666666666667e-06, "loss": 3.6963, "step": 894 }, { "epoch": 8.95, "grad_norm": 46.37641906738281, "learning_rate": 7.043333333333334e-06, "loss": 2.8818, "step": 895 }, { "epoch": 8.96, "grad_norm": 79.4222640991211, "learning_rate": 7.04e-06, "loss": 3.0806, "step": 896 }, { "epoch": 8.97, "grad_norm": 202.02175903320312, "learning_rate": 7.036666666666667e-06, "loss": 2.7988, 
"step": 897 }, { "epoch": 8.98, "grad_norm": 113.80973815917969, "learning_rate": 7.033333333333334e-06, "loss": 3.1003, "step": 898 }, { "epoch": 8.99, "grad_norm": 41.925262451171875, "learning_rate": 7.0300000000000005e-06, "loss": 3.376, "step": 899 }, { "epoch": 9.0, "grad_norm": 72.86280822753906, "learning_rate": 7.0266666666666674e-06, "loss": 2.9341, "step": 900 }, { "epoch": 9.0, "eval_loss": 3.0638954639434814, "eval_map": 0.003, "eval_map_50": 0.0068, "eval_map_75": 0.0023, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0004, "eval_map_medium": 0.0056, "eval_map_neckline": 0.0, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1145, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0083, "eval_map_small": 0.0036, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0031, "eval_mar_10": 0.0137, "eval_mar_100": 0.0191, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, 
"eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.4388, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.3435, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0128, "eval_mar_medium": 0.0313, "eval_mar_small": 0.0149, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.6666, "eval_samples_per_second": 5.357, "eval_steps_per_second": 1.339, "step": 900 }, { "epoch": 9.01, "grad_norm": 43.11838912963867, "learning_rate": 7.023333333333334e-06, "loss": 3.103, "step": 901 }, { "epoch": 9.02, "grad_norm": 44.2508544921875, "learning_rate": 7.0200000000000006e-06, "loss": 2.7347, "step": 902 }, { "epoch": 9.03, "grad_norm": 77.50755310058594, "learning_rate": 7.0166666666666675e-06, "loss": 2.8879, "step": 903 }, { "epoch": 9.04, "grad_norm": 63.20185089111328, "learning_rate": 7.0133333333333345e-06, "loss": 3.6103, "step": 904 }, { "epoch": 9.05, "grad_norm": 53.10676574707031, "learning_rate": 7.01e-06, "loss": 2.7754, "step": 905 }, { "epoch": 9.06, "grad_norm": 45.018619537353516, "learning_rate": 7.006666666666667e-06, "loss": 3.5357, "step": 906 }, { "epoch": 9.07, "grad_norm": 87.9382553100586, "learning_rate": 7.003333333333335e-06, "loss": 3.1061, "step": 907 }, { "epoch": 9.08, "grad_norm": 
36.46385192871094, "learning_rate": 7e-06, "loss": 3.1833, "step": 908 }, { "epoch": 9.09, "grad_norm": 40.3581428527832, "learning_rate": 6.996666666666667e-06, "loss": 3.1722, "step": 909 }, { "epoch": 9.1, "grad_norm": 55.30073928833008, "learning_rate": 6.993333333333334e-06, "loss": 3.3975, "step": 910 }, { "epoch": 9.11, "grad_norm": 78.57398223876953, "learning_rate": 6.99e-06, "loss": 2.9439, "step": 911 }, { "epoch": 9.12, "grad_norm": 72.84364318847656, "learning_rate": 6.986666666666667e-06, "loss": 2.5332, "step": 912 }, { "epoch": 9.13, "grad_norm": 48.99262237548828, "learning_rate": 6.983333333333334e-06, "loss": 3.3508, "step": 913 }, { "epoch": 9.14, "grad_norm": 57.501808166503906, "learning_rate": 6.98e-06, "loss": 2.7936, "step": 914 }, { "epoch": 9.15, "grad_norm": 41.6580924987793, "learning_rate": 6.976666666666667e-06, "loss": 2.4378, "step": 915 }, { "epoch": 9.16, "grad_norm": 220.61375427246094, "learning_rate": 6.973333333333334e-06, "loss": 2.8267, "step": 916 }, { "epoch": 9.17, "grad_norm": 89.43002319335938, "learning_rate": 6.97e-06, "loss": 2.9199, "step": 917 }, { "epoch": 9.18, "grad_norm": 70.80623626708984, "learning_rate": 6.966666666666667e-06, "loss": 3.318, "step": 918 }, { "epoch": 9.19, "grad_norm": 77.61578369140625, "learning_rate": 6.963333333333334e-06, "loss": 4.3018, "step": 919 }, { "epoch": 9.2, "grad_norm": 37.03392028808594, "learning_rate": 6.96e-06, "loss": 3.1413, "step": 920 }, { "epoch": 9.21, "grad_norm": 64.72554779052734, "learning_rate": 6.956666666666667e-06, "loss": 3.8212, "step": 921 }, { "epoch": 9.22, "grad_norm": 45.9349365234375, "learning_rate": 6.953333333333334e-06, "loss": 3.1277, "step": 922 }, { "epoch": 9.23, "grad_norm": 34.54985427856445, "learning_rate": 6.95e-06, "loss": 3.0901, "step": 923 }, { "epoch": 9.24, "grad_norm": 85.215576171875, "learning_rate": 6.946666666666667e-06, "loss": 3.5761, "step": 924 }, { "epoch": 9.25, "grad_norm": 54.06900405883789, "learning_rate": 
6.943333333333334e-06, "loss": 3.6229, "step": 925 }, { "epoch": 9.26, "grad_norm": 85.98793029785156, "learning_rate": 6.9400000000000005e-06, "loss": 3.5074, "step": 926 }, { "epoch": 9.27, "grad_norm": 49.23918914794922, "learning_rate": 6.9366666666666675e-06, "loss": 3.2552, "step": 927 }, { "epoch": 9.28, "grad_norm": 58.45280456542969, "learning_rate": 6.9333333333333344e-06, "loss": 3.2252, "step": 928 }, { "epoch": 9.29, "grad_norm": 65.75541687011719, "learning_rate": 6.93e-06, "loss": 3.0659, "step": 929 }, { "epoch": 9.3, "grad_norm": 83.8568344116211, "learning_rate": 6.926666666666667e-06, "loss": 3.2822, "step": 930 }, { "epoch": 9.31, "grad_norm": 46.099910736083984, "learning_rate": 6.9233333333333345e-06, "loss": 3.5501, "step": 931 }, { "epoch": 9.32, "grad_norm": 72.86409759521484, "learning_rate": 6.92e-06, "loss": 3.4844, "step": 932 }, { "epoch": 9.33, "grad_norm": 32.59096145629883, "learning_rate": 6.916666666666667e-06, "loss": 2.8902, "step": 933 }, { "epoch": 9.34, "grad_norm": 88.95465850830078, "learning_rate": 6.913333333333334e-06, "loss": 2.8786, "step": 934 }, { "epoch": 9.35, "grad_norm": 85.3768539428711, "learning_rate": 6.91e-06, "loss": 2.914, "step": 935 }, { "epoch": 9.36, "grad_norm": 87.25706481933594, "learning_rate": 6.906666666666667e-06, "loss": 3.2705, "step": 936 }, { "epoch": 9.37, "grad_norm": 78.9551010131836, "learning_rate": 6.903333333333334e-06, "loss": 3.3611, "step": 937 }, { "epoch": 9.38, "grad_norm": 88.18824768066406, "learning_rate": 6.9e-06, "loss": 2.9301, "step": 938 }, { "epoch": 9.39, "grad_norm": 44.59553909301758, "learning_rate": 6.896666666666667e-06, "loss": 4.0873, "step": 939 }, { "epoch": 9.4, "grad_norm": 24.620365142822266, "learning_rate": 6.893333333333334e-06, "loss": 4.0211, "step": 940 }, { "epoch": 9.41, "grad_norm": 246.9785919189453, "learning_rate": 6.89e-06, "loss": 3.2207, "step": 941 }, { "epoch": 9.42, "grad_norm": 664.0454711914062, "learning_rate": 6.886666666666667e-06, 
"loss": 3.4812, "step": 942 }, { "epoch": 9.43, "grad_norm": 126.53782653808594, "learning_rate": 6.883333333333334e-06, "loss": 3.1248, "step": 943 }, { "epoch": 9.44, "grad_norm": 76.63378143310547, "learning_rate": 6.88e-06, "loss": 2.3175, "step": 944 }, { "epoch": 9.45, "grad_norm": 97.13172912597656, "learning_rate": 6.876666666666667e-06, "loss": 2.9624, "step": 945 }, { "epoch": 9.46, "grad_norm": 61.49402618408203, "learning_rate": 6.873333333333334e-06, "loss": 3.4008, "step": 946 }, { "epoch": 9.47, "grad_norm": 62.71019744873047, "learning_rate": 6.870000000000001e-06, "loss": 2.6293, "step": 947 }, { "epoch": 9.48, "grad_norm": 91.89797973632812, "learning_rate": 6.866666666666667e-06, "loss": 3.0077, "step": 948 }, { "epoch": 9.49, "grad_norm": 147.23626708984375, "learning_rate": 6.863333333333334e-06, "loss": 3.0049, "step": 949 }, { "epoch": 9.5, "grad_norm": 68.47087097167969, "learning_rate": 6.860000000000001e-06, "loss": 2.9812, "step": 950 }, { "epoch": 9.5, "eval_loss": 3.0262808799743652, "eval_map": 0.0028, "eval_map_50": 0.0066, "eval_map_75": 0.0017, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0004, "eval_map_medium": 0.0057, "eval_map_neckline": 0.0049, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1028, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0091, "eval_map_small": 
0.003, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0039, "eval_mar_10": 0.014, "eval_mar_100": 0.0197, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0159, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.4425, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.3504, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0165, "eval_mar_medium": 0.0332, "eval_mar_small": 0.0156, "eval_model_preparation_time": 0.0124, "eval_runtime": 19.174, "eval_samples_per_second": 5.215, "eval_steps_per_second": 1.304, "step": 950 }, { "epoch": 9.51, "grad_norm": 156.07957458496094, "learning_rate": 6.856666666666667e-06, "loss": 3.2977, "step": 951 }, { "epoch": 9.52, "grad_norm": 67.2162094116211, "learning_rate": 6.853333333333334e-06, "loss": 3.2486, "step": 952 }, { "epoch": 9.53, "grad_norm": 
44.917293548583984, "learning_rate": 6.850000000000001e-06, "loss": 3.3633, "step": 953 }, { "epoch": 9.54, "grad_norm": 69.49148559570312, "learning_rate": 6.846666666666667e-06, "loss": 3.1782, "step": 954 }, { "epoch": 9.55, "grad_norm": 63.152339935302734, "learning_rate": 6.8433333333333344e-06, "loss": 2.6623, "step": 955 }, { "epoch": 9.56, "grad_norm": 43.6594352722168, "learning_rate": 6.8400000000000014e-06, "loss": 3.6439, "step": 956 }, { "epoch": 9.57, "grad_norm": 48.14381408691406, "learning_rate": 6.836666666666667e-06, "loss": 3.3927, "step": 957 }, { "epoch": 9.58, "grad_norm": 34.92618942260742, "learning_rate": 6.833333333333334e-06, "loss": 2.673, "step": 958 }, { "epoch": 9.59, "grad_norm": 53.683589935302734, "learning_rate": 6.830000000000001e-06, "loss": 2.7076, "step": 959 }, { "epoch": 9.6, "grad_norm": 108.03275299072266, "learning_rate": 6.826666666666667e-06, "loss": 2.8242, "step": 960 }, { "epoch": 9.61, "grad_norm": 48.62644577026367, "learning_rate": 6.823333333333334e-06, "loss": 3.2196, "step": 961 }, { "epoch": 9.62, "grad_norm": 311.4502258300781, "learning_rate": 6.820000000000001e-06, "loss": 3.023, "step": 962 }, { "epoch": 9.63, "grad_norm": 50.26765060424805, "learning_rate": 6.816666666666667e-06, "loss": 3.3501, "step": 963 }, { "epoch": 9.64, "grad_norm": 48.62815856933594, "learning_rate": 6.813333333333334e-06, "loss": 3.1463, "step": 964 }, { "epoch": 9.65, "grad_norm": 54.20383834838867, "learning_rate": 6.810000000000001e-06, "loss": 3.3256, "step": 965 }, { "epoch": 9.66, "grad_norm": 52.40863800048828, "learning_rate": 6.806666666666667e-06, "loss": 3.0918, "step": 966 }, { "epoch": 9.67, "grad_norm": 113.42117309570312, "learning_rate": 6.803333333333334e-06, "loss": 3.0608, "step": 967 }, { "epoch": 9.68, "grad_norm": 83.4305419921875, "learning_rate": 6.800000000000001e-06, "loss": 3.502, "step": 968 }, { "epoch": 9.69, "grad_norm": 177.79095458984375, "learning_rate": 6.796666666666667e-06, "loss": 2.7089, 
"step": 969 }, { "epoch": 9.7, "grad_norm": 46.521522521972656, "learning_rate": 6.793333333333334e-06, "loss": 3.504, "step": 970 }, { "epoch": 9.71, "grad_norm": 63.85769271850586, "learning_rate": 6.790000000000001e-06, "loss": 3.0648, "step": 971 }, { "epoch": 9.72, "grad_norm": 110.34920501708984, "learning_rate": 6.786666666666667e-06, "loss": 3.5513, "step": 972 }, { "epoch": 9.73, "grad_norm": 46.840431213378906, "learning_rate": 6.783333333333334e-06, "loss": 2.5334, "step": 973 }, { "epoch": 9.74, "grad_norm": 72.69619750976562, "learning_rate": 6.780000000000001e-06, "loss": 3.5658, "step": 974 }, { "epoch": 9.75, "grad_norm": 121.007080078125, "learning_rate": 6.776666666666667e-06, "loss": 3.5602, "step": 975 }, { "epoch": 9.76, "grad_norm": 54.97415542602539, "learning_rate": 6.773333333333334e-06, "loss": 3.1006, "step": 976 }, { "epoch": 9.77, "grad_norm": 130.57785034179688, "learning_rate": 6.770000000000001e-06, "loss": 2.7477, "step": 977 }, { "epoch": 9.78, "grad_norm": 254.47193908691406, "learning_rate": 6.7666666666666665e-06, "loss": 2.8491, "step": 978 }, { "epoch": 9.79, "grad_norm": 170.51235961914062, "learning_rate": 6.763333333333334e-06, "loss": 2.5385, "step": 979 }, { "epoch": 9.8, "grad_norm": 67.36145782470703, "learning_rate": 6.760000000000001e-06, "loss": 5.9193, "step": 980 }, { "epoch": 9.81, "grad_norm": 45.40848922729492, "learning_rate": 6.756666666666667e-06, "loss": 3.6438, "step": 981 }, { "epoch": 9.82, "grad_norm": 55.738582611083984, "learning_rate": 6.753333333333334e-06, "loss": 3.1632, "step": 982 }, { "epoch": 9.83, "grad_norm": 51.84451675415039, "learning_rate": 6.750000000000001e-06, "loss": 3.0807, "step": 983 }, { "epoch": 9.84, "grad_norm": 84.72268676757812, "learning_rate": 6.746666666666667e-06, "loss": 3.3288, "step": 984 }, { "epoch": 9.85, "grad_norm": 231.41796875, "learning_rate": 6.743333333333334e-06, "loss": 2.7369, "step": 985 }, { "epoch": 9.86, "grad_norm": 56.34691619873047, "learning_rate": 
6.740000000000001e-06, "loss": 3.3056, "step": 986 }, { "epoch": 9.87, "grad_norm": 145.26791381835938, "learning_rate": 6.736666666666667e-06, "loss": 2.6966, "step": 987 }, { "epoch": 9.88, "grad_norm": 54.62516403198242, "learning_rate": 6.733333333333334e-06, "loss": 2.8777, "step": 988 }, { "epoch": 9.89, "grad_norm": 35.68779754638672, "learning_rate": 6.730000000000001e-06, "loss": 3.2132, "step": 989 }, { "epoch": 9.9, "grad_norm": 44.25446701049805, "learning_rate": 6.726666666666667e-06, "loss": 3.0287, "step": 990 }, { "epoch": 9.91, "grad_norm": 217.48477172851562, "learning_rate": 6.723333333333334e-06, "loss": 2.5416, "step": 991 }, { "epoch": 9.92, "grad_norm": 37.738746643066406, "learning_rate": 6.720000000000001e-06, "loss": 2.7567, "step": 992 }, { "epoch": 9.93, "grad_norm": 158.26756286621094, "learning_rate": 6.716666666666667e-06, "loss": 2.5673, "step": 993 }, { "epoch": 9.94, "grad_norm": 25.842926025390625, "learning_rate": 6.713333333333334e-06, "loss": 6.3393, "step": 994 }, { "epoch": 9.95, "grad_norm": 63.653690338134766, "learning_rate": 6.710000000000001e-06, "loss": 2.7215, "step": 995 }, { "epoch": 9.96, "grad_norm": 43.5466423034668, "learning_rate": 6.706666666666667e-06, "loss": 3.4172, "step": 996 }, { "epoch": 9.97, "grad_norm": 44.704654693603516, "learning_rate": 6.703333333333334e-06, "loss": 3.3283, "step": 997 }, { "epoch": 9.98, "grad_norm": 32.3846321105957, "learning_rate": 6.700000000000001e-06, "loss": 3.1531, "step": 998 }, { "epoch": 9.99, "grad_norm": 82.55770111083984, "learning_rate": 6.696666666666667e-06, "loss": 3.5377, "step": 999 }, { "epoch": 10.0, "grad_norm": 50.84318161010742, "learning_rate": 6.693333333333334e-06, "loss": 2.8798, "step": 1000 }, { "epoch": 10.0, "eval_loss": 2.976698637008667, "eval_map": 0.003, "eval_map_50": 0.0068, "eval_map_75": 0.0022, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, 
"eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0003, "eval_map_medium": 0.0066, "eval_map_neckline": 0.0003, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1096, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0119, "eval_map_small": 0.003, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0034, "eval_mar_10": 0.0145, "eval_mar_100": 0.021, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0016, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.4619, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, 
"eval_mar_100_sleeve": 0.3965, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0186, "eval_mar_medium": 0.0343, "eval_mar_small": 0.0159, "eval_model_preparation_time": 0.0124, "eval_runtime": 19.0085, "eval_samples_per_second": 5.261, "eval_steps_per_second": 1.315, "step": 1000 }, { "epoch": 10.01, "grad_norm": 55.55321502685547, "learning_rate": 6.690000000000001e-06, "loss": 3.6948, "step": 1001 }, { "epoch": 10.02, "grad_norm": 35.740501403808594, "learning_rate": 6.6866666666666665e-06, "loss": 3.1887, "step": 1002 }, { "epoch": 10.03, "grad_norm": 49.9847297668457, "learning_rate": 6.683333333333334e-06, "loss": 3.2392, "step": 1003 }, { "epoch": 10.04, "grad_norm": 43.56863784790039, "learning_rate": 6.680000000000001e-06, "loss": 3.2152, "step": 1004 }, { "epoch": 10.05, "grad_norm": 36.804725646972656, "learning_rate": 6.6766666666666666e-06, "loss": 3.3524, "step": 1005 }, { "epoch": 10.06, "grad_norm": 71.75359344482422, "learning_rate": 6.6733333333333335e-06, "loss": 2.7104, "step": 1006 }, { "epoch": 10.07, "grad_norm": 127.48563385009766, "learning_rate": 6.6700000000000005e-06, "loss": 3.0328, "step": 1007 }, { "epoch": 10.08, "grad_norm": 53.09702682495117, "learning_rate": 6.666666666666667e-06, "loss": 4.3278, "step": 1008 }, { "epoch": 10.09, "grad_norm": 42.25293731689453, "learning_rate": 6.663333333333334e-06, "loss": 2.7538, "step": 1009 }, { "epoch": 10.1, "grad_norm": 117.25181579589844, "learning_rate": 6.660000000000001e-06, "loss": 3.1965, "step": 1010 }, { "epoch": 10.11, "grad_norm": 37.83295822143555, "learning_rate": 6.656666666666667e-06, "loss": 3.0388, "step": 1011 }, { "epoch": 10.12, "grad_norm": 73.0062484741211, "learning_rate": 6.653333333333334e-06, "loss": 3.3421, "step": 1012 }, { "epoch": 10.13, 
"grad_norm": 50.69684600830078, "learning_rate": 6.650000000000001e-06, "loss": 2.7868, "step": 1013 }, { "epoch": 10.14, "grad_norm": 59.09663772583008, "learning_rate": 6.646666666666667e-06, "loss": 2.8885, "step": 1014 }, { "epoch": 10.15, "grad_norm": 65.6126708984375, "learning_rate": 6.643333333333334e-06, "loss": 3.4968, "step": 1015 }, { "epoch": 10.16, "grad_norm": 35.023319244384766, "learning_rate": 6.640000000000001e-06, "loss": 2.8137, "step": 1016 }, { "epoch": 10.17, "grad_norm": 160.48318481445312, "learning_rate": 6.636666666666667e-06, "loss": 2.9233, "step": 1017 }, { "epoch": 10.18, "grad_norm": 57.84931564331055, "learning_rate": 6.633333333333334e-06, "loss": 2.9531, "step": 1018 }, { "epoch": 10.19, "grad_norm": 79.03472137451172, "learning_rate": 6.630000000000001e-06, "loss": 2.9686, "step": 1019 }, { "epoch": 10.2, "grad_norm": 114.63299560546875, "learning_rate": 6.626666666666667e-06, "loss": 2.7807, "step": 1020 }, { "epoch": 10.21, "grad_norm": 51.04676818847656, "learning_rate": 6.623333333333334e-06, "loss": 3.7845, "step": 1021 }, { "epoch": 10.22, "grad_norm": 76.43449401855469, "learning_rate": 6.620000000000001e-06, "loss": 3.1778, "step": 1022 }, { "epoch": 10.23, "grad_norm": 56.32320785522461, "learning_rate": 6.616666666666667e-06, "loss": 3.5233, "step": 1023 }, { "epoch": 10.24, "grad_norm": 47.51877975463867, "learning_rate": 6.613333333333334e-06, "loss": 2.8102, "step": 1024 }, { "epoch": 10.25, "grad_norm": 77.50133514404297, "learning_rate": 6.610000000000001e-06, "loss": 3.1428, "step": 1025 }, { "epoch": 10.26, "grad_norm": 91.91705322265625, "learning_rate": 6.606666666666666e-06, "loss": 2.3933, "step": 1026 }, { "epoch": 10.27, "grad_norm": 27.56330108642578, "learning_rate": 6.603333333333334e-06, "loss": 3.678, "step": 1027 }, { "epoch": 10.28, "grad_norm": 43.72032165527344, "learning_rate": 6.600000000000001e-06, "loss": 3.5913, "step": 1028 }, { "epoch": 10.29, "grad_norm": 44.938819885253906, 
"learning_rate": 6.5966666666666665e-06, "loss": 3.1897, "step": 1029 }, { "epoch": 10.3, "grad_norm": 75.02011108398438, "learning_rate": 6.5933333333333335e-06, "loss": 3.3779, "step": 1030 }, { "epoch": 10.31, "grad_norm": 77.54203796386719, "learning_rate": 6.5900000000000004e-06, "loss": 2.976, "step": 1031 }, { "epoch": 10.32, "grad_norm": 134.1982421875, "learning_rate": 6.5866666666666666e-06, "loss": 2.8753, "step": 1032 }, { "epoch": 10.33, "grad_norm": 83.97195434570312, "learning_rate": 6.5833333333333335e-06, "loss": 3.1796, "step": 1033 }, { "epoch": 10.34, "grad_norm": 179.36952209472656, "learning_rate": 6.5800000000000005e-06, "loss": 2.8975, "step": 1034 }, { "epoch": 10.35, "grad_norm": 92.83879852294922, "learning_rate": 6.576666666666667e-06, "loss": 2.2507, "step": 1035 }, { "epoch": 10.36, "grad_norm": 100.28288269042969, "learning_rate": 6.573333333333334e-06, "loss": 3.4281, "step": 1036 }, { "epoch": 10.37, "grad_norm": 67.25664520263672, "learning_rate": 6.570000000000001e-06, "loss": 2.8387, "step": 1037 }, { "epoch": 10.38, "grad_norm": 53.58821105957031, "learning_rate": 6.566666666666667e-06, "loss": 3.0258, "step": 1038 }, { "epoch": 10.39, "grad_norm": 140.94142150878906, "learning_rate": 6.563333333333334e-06, "loss": 2.8943, "step": 1039 }, { "epoch": 10.4, "grad_norm": 120.34091186523438, "learning_rate": 6.560000000000001e-06, "loss": 3.168, "step": 1040 }, { "epoch": 10.41, "grad_norm": 60.11937713623047, "learning_rate": 6.556666666666667e-06, "loss": 2.9637, "step": 1041 }, { "epoch": 10.42, "grad_norm": 55.262550354003906, "learning_rate": 6.553333333333334e-06, "loss": 3.2833, "step": 1042 }, { "epoch": 10.43, "grad_norm": 48.13302993774414, "learning_rate": 6.550000000000001e-06, "loss": 3.6882, "step": 1043 }, { "epoch": 10.44, "grad_norm": 43.43091583251953, "learning_rate": 6.546666666666667e-06, "loss": 3.6927, "step": 1044 }, { "epoch": 10.45, "grad_norm": 33.02166748046875, "learning_rate": 6.543333333333334e-06, 
"loss": 3.8064, "step": 1045 }, { "epoch": 10.46, "grad_norm": 224.9364776611328, "learning_rate": 6.540000000000001e-06, "loss": 3.3619, "step": 1046 }, { "epoch": 10.47, "grad_norm": 47.00593185424805, "learning_rate": 6.536666666666667e-06, "loss": 2.9528, "step": 1047 }, { "epoch": 10.48, "grad_norm": 44.041786193847656, "learning_rate": 6.533333333333334e-06, "loss": 3.9209, "step": 1048 }, { "epoch": 10.49, "grad_norm": 227.52243041992188, "learning_rate": 6.530000000000001e-06, "loss": 2.6133, "step": 1049 }, { "epoch": 10.5, "grad_norm": 36.48926544189453, "learning_rate": 6.526666666666666e-06, "loss": 2.8805, "step": 1050 }, { "epoch": 10.5, "eval_loss": 2.9618566036224365, "eval_map": 0.0032, "eval_map_50": 0.0075, "eval_map_75": 0.0026, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0002, "eval_map_medium": 0.0065, "eval_map_neckline": 0.0085, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1099, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0145, "eval_map_small": 0.0042, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0038, "eval_mar_10": 0.0157, "eval_mar_100": 0.0227, "eval_mar_100_applique": 0.0, 
"eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0254, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.4866, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4183, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0178, "eval_mar_medium": 0.0366, "eval_mar_small": 0.0201, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.6675, "eval_samples_per_second": 5.357, "eval_steps_per_second": 1.339, "step": 1050 }, { "epoch": 10.51, "grad_norm": 52.814788818359375, "learning_rate": 6.523333333333334e-06, "loss": 3.474, "step": 1051 }, { "epoch": 10.52, "grad_norm": 44.68980407714844, "learning_rate": 6.520000000000001e-06, "loss": 2.9647, "step": 1052 }, { "epoch": 10.53, "grad_norm": 71.85285949707031, "learning_rate": 6.516666666666666e-06, "loss": 2.9399, "step": 1053 }, { "epoch": 10.54, "grad_norm": 243.17779541015625, "learning_rate": 6.513333333333333e-06, "loss": 2.5293, "step": 1054 }, { "epoch": 10.55, "grad_norm": 32.80049133300781, "learning_rate": 6.51e-06, "loss": 2.9778, "step": 
1055 }, { "epoch": 10.56, "grad_norm": 43.29606628417969, "learning_rate": 6.5066666666666665e-06, "loss": 3.0926, "step": 1056 }, { "epoch": 10.57, "grad_norm": 35.44511795043945, "learning_rate": 6.5033333333333335e-06, "loss": 3.5172, "step": 1057 }, { "epoch": 10.58, "grad_norm": 39.908016204833984, "learning_rate": 6.5000000000000004e-06, "loss": 3.1795, "step": 1058 }, { "epoch": 10.59, "grad_norm": 46.81090545654297, "learning_rate": 6.496666666666667e-06, "loss": 3.2375, "step": 1059 }, { "epoch": 10.6, "grad_norm": 54.66707992553711, "learning_rate": 6.4933333333333336e-06, "loss": 2.3489, "step": 1060 }, { "epoch": 10.61, "grad_norm": 39.1214599609375, "learning_rate": 6.4900000000000005e-06, "loss": 3.4397, "step": 1061 }, { "epoch": 10.62, "grad_norm": 86.47811126708984, "learning_rate": 6.486666666666667e-06, "loss": 2.9046, "step": 1062 }, { "epoch": 10.63, "grad_norm": 72.93324279785156, "learning_rate": 6.483333333333334e-06, "loss": 3.2661, "step": 1063 }, { "epoch": 10.64, "grad_norm": 151.0166473388672, "learning_rate": 6.480000000000001e-06, "loss": 2.3181, "step": 1064 }, { "epoch": 10.65, "grad_norm": 97.0697021484375, "learning_rate": 6.476666666666667e-06, "loss": 3.7891, "step": 1065 }, { "epoch": 10.66, "grad_norm": 32.10997772216797, "learning_rate": 6.473333333333334e-06, "loss": 2.6126, "step": 1066 }, { "epoch": 10.67, "grad_norm": 97.91441345214844, "learning_rate": 6.470000000000001e-06, "loss": 3.5999, "step": 1067 }, { "epoch": 10.68, "grad_norm": 49.53647994995117, "learning_rate": 6.466666666666667e-06, "loss": 2.5739, "step": 1068 }, { "epoch": 10.69, "grad_norm": 105.99763488769531, "learning_rate": 6.463333333333334e-06, "loss": 3.0307, "step": 1069 }, { "epoch": 10.7, "grad_norm": 71.68827819824219, "learning_rate": 6.460000000000001e-06, "loss": 3.0623, "step": 1070 }, { "epoch": 10.71, "grad_norm": 124.65274810791016, "learning_rate": 6.456666666666668e-06, "loss": 3.021, "step": 1071 }, { "epoch": 10.72, "grad_norm": 
81.1538314819336, "learning_rate": 6.453333333333334e-06, "loss": 2.9931, "step": 1072 }, { "epoch": 10.73, "grad_norm": 50.8171272277832, "learning_rate": 6.450000000000001e-06, "loss": 3.4314, "step": 1073 }, { "epoch": 10.74, "grad_norm": 45.132747650146484, "learning_rate": 6.446666666666668e-06, "loss": 2.7238, "step": 1074 }, { "epoch": 10.75, "grad_norm": 48.5260009765625, "learning_rate": 6.443333333333334e-06, "loss": 2.5047, "step": 1075 }, { "epoch": 10.76, "grad_norm": 57.979373931884766, "learning_rate": 6.440000000000001e-06, "loss": 3.3115, "step": 1076 }, { "epoch": 10.77, "grad_norm": 54.896949768066406, "learning_rate": 6.436666666666668e-06, "loss": 6.2774, "step": 1077 }, { "epoch": 10.78, "grad_norm": 51.3914794921875, "learning_rate": 6.433333333333333e-06, "loss": 2.857, "step": 1078 }, { "epoch": 10.79, "grad_norm": 123.19451141357422, "learning_rate": 6.43e-06, "loss": 2.7406, "step": 1079 }, { "epoch": 10.8, "grad_norm": 113.53748321533203, "learning_rate": 6.426666666666668e-06, "loss": 3.6535, "step": 1080 }, { "epoch": 10.81, "grad_norm": 55.424217224121094, "learning_rate": 6.423333333333333e-06, "loss": 2.9104, "step": 1081 }, { "epoch": 10.82, "grad_norm": 47.28289031982422, "learning_rate": 6.42e-06, "loss": 2.6298, "step": 1082 }, { "epoch": 10.83, "grad_norm": 118.047607421875, "learning_rate": 6.416666666666667e-06, "loss": 3.068, "step": 1083 }, { "epoch": 10.84, "grad_norm": 58.92686080932617, "learning_rate": 6.4133333333333335e-06, "loss": 2.9396, "step": 1084 }, { "epoch": 10.85, "grad_norm": 96.97832489013672, "learning_rate": 6.4100000000000005e-06, "loss": 2.6284, "step": 1085 }, { "epoch": 10.86, "grad_norm": 57.232383728027344, "learning_rate": 6.4066666666666674e-06, "loss": 2.3811, "step": 1086 }, { "epoch": 10.87, "grad_norm": 60.2580680847168, "learning_rate": 6.403333333333334e-06, "loss": 2.6644, "step": 1087 }, { "epoch": 10.88, "grad_norm": 68.96156311035156, "learning_rate": 6.4000000000000006e-06, "loss": 
2.9867, "step": 1088 }, { "epoch": 10.89, "grad_norm": 67.42642211914062, "learning_rate": 6.3966666666666675e-06, "loss": 3.2873, "step": 1089 }, { "epoch": 10.9, "grad_norm": 52.49326705932617, "learning_rate": 6.393333333333334e-06, "loss": 2.5586, "step": 1090 }, { "epoch": 10.91, "grad_norm": 69.24246215820312, "learning_rate": 6.390000000000001e-06, "loss": 3.5119, "step": 1091 }, { "epoch": 10.92, "grad_norm": 71.38142395019531, "learning_rate": 6.386666666666668e-06, "loss": 2.8226, "step": 1092 }, { "epoch": 10.93, "grad_norm": 889.5325317382812, "learning_rate": 6.383333333333334e-06, "loss": 3.1214, "step": 1093 }, { "epoch": 10.94, "grad_norm": 50.83872985839844, "learning_rate": 6.380000000000001e-06, "loss": 3.1733, "step": 1094 }, { "epoch": 10.95, "grad_norm": 40.071144104003906, "learning_rate": 6.376666666666668e-06, "loss": 3.9119, "step": 1095 }, { "epoch": 10.96, "grad_norm": 98.46548461914062, "learning_rate": 6.373333333333334e-06, "loss": 6.1372, "step": 1096 }, { "epoch": 10.97, "grad_norm": 68.23551177978516, "learning_rate": 6.370000000000001e-06, "loss": 3.0642, "step": 1097 }, { "epoch": 10.98, "grad_norm": 93.5020751953125, "learning_rate": 6.366666666666668e-06, "loss": 3.5148, "step": 1098 }, { "epoch": 10.99, "grad_norm": 81.57500457763672, "learning_rate": 6.363333333333334e-06, "loss": 2.4173, "step": 1099 }, { "epoch": 11.0, "grad_norm": 93.81462097167969, "learning_rate": 6.360000000000001e-06, "loss": 3.0194, "step": 1100 }, { "epoch": 11.0, "eval_loss": 2.984597682952881, "eval_map": 0.0028, "eval_map_50": 0.007, "eval_map_75": 0.0022, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair 
accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0002, "eval_map_medium": 0.0059, "eval_map_neckline": 0.0059, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.0963, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0137, "eval_map_small": 0.0027, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0042, "eval_mar_10": 0.0147, "eval_mar_100": 0.019, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0095, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.4299, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.34, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, 
"eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0174, "eval_mar_medium": 0.0325, "eval_mar_small": 0.0131, "eval_model_preparation_time": 0.0124, "eval_runtime": 19.8934, "eval_samples_per_second": 5.027, "eval_steps_per_second": 1.257, "step": 1100 }, { "epoch": 11.01, "grad_norm": 92.07966613769531, "learning_rate": 6.356666666666668e-06, "loss": 3.0022, "step": 1101 }, { "epoch": 11.02, "grad_norm": 106.64404296875, "learning_rate": 6.353333333333333e-06, "loss": 3.0696, "step": 1102 }, { "epoch": 11.03, "grad_norm": 224.2969970703125, "learning_rate": 6.35e-06, "loss": 3.1205, "step": 1103 }, { "epoch": 11.04, "grad_norm": 79.86857604980469, "learning_rate": 6.346666666666668e-06, "loss": 6.1092, "step": 1104 }, { "epoch": 11.05, "grad_norm": 49.06391143798828, "learning_rate": 6.343333333333333e-06, "loss": 3.6314, "step": 1105 }, { "epoch": 11.06, "grad_norm": 61.63262176513672, "learning_rate": 6.34e-06, "loss": 3.4347, "step": 1106 }, { "epoch": 11.07, "grad_norm": 88.46965026855469, "learning_rate": 6.336666666666667e-06, "loss": 3.5562, "step": 1107 }, { "epoch": 11.08, "grad_norm": 55.259193420410156, "learning_rate": 6.333333333333333e-06, "loss": 2.8892, "step": 1108 }, { "epoch": 11.09, "grad_norm": 70.70368194580078, "learning_rate": 6.33e-06, "loss": 2.9116, "step": 1109 }, { "epoch": 11.1, "grad_norm": 52.76454162597656, "learning_rate": 6.326666666666667e-06, "loss": 3.2083, "step": 1110 }, { "epoch": 11.11, "grad_norm": 209.08819580078125, "learning_rate": 6.3233333333333335e-06, "loss": 2.7419, "step": 1111 }, { "epoch": 11.12, "grad_norm": 63.71657180786133, "learning_rate": 6.3200000000000005e-06, "loss": 3.2328, "step": 1112 }, { "epoch": 11.13, "grad_norm": 71.37793731689453, "learning_rate": 6.3166666666666675e-06, "loss": 3.5397, "step": 1113 }, { "epoch": 11.14, "grad_norm": 87.6813735961914, "learning_rate": 6.313333333333334e-06, "loss": 2.8569, "step": 1114 }, { "epoch": 11.15, "grad_norm": 62.50037384033203, "learning_rate": 
6.3100000000000006e-06, "loss": 2.8707, "step": 1115 }, { "epoch": 11.16, "grad_norm": 70.18060302734375, "learning_rate": 6.3066666666666676e-06, "loss": 3.1245, "step": 1116 }, { "epoch": 11.17, "grad_norm": 52.92352294921875, "learning_rate": 6.303333333333334e-06, "loss": 3.0588, "step": 1117 }, { "epoch": 11.18, "grad_norm": 87.81706237792969, "learning_rate": 6.300000000000001e-06, "loss": 3.4716, "step": 1118 }, { "epoch": 11.19, "grad_norm": 113.6163101196289, "learning_rate": 6.296666666666668e-06, "loss": 2.9694, "step": 1119 }, { "epoch": 11.2, "grad_norm": 38.04183578491211, "learning_rate": 6.293333333333334e-06, "loss": 3.2848, "step": 1120 }, { "epoch": 11.21, "grad_norm": 51.07545852661133, "learning_rate": 6.290000000000001e-06, "loss": 3.2299, "step": 1121 }, { "epoch": 11.22, "grad_norm": 85.43119812011719, "learning_rate": 6.286666666666668e-06, "loss": 2.809, "step": 1122 }, { "epoch": 11.23, "grad_norm": 79.13822174072266, "learning_rate": 6.283333333333334e-06, "loss": 3.3026, "step": 1123 }, { "epoch": 11.24, "grad_norm": 99.17243957519531, "learning_rate": 6.280000000000001e-06, "loss": 2.4474, "step": 1124 }, { "epoch": 11.25, "grad_norm": 48.68551254272461, "learning_rate": 6.276666666666668e-06, "loss": 2.938, "step": 1125 }, { "epoch": 11.26, "grad_norm": 54.73687744140625, "learning_rate": 6.273333333333333e-06, "loss": 2.9838, "step": 1126 }, { "epoch": 11.27, "grad_norm": 72.91519165039062, "learning_rate": 6.27e-06, "loss": 4.4982, "step": 1127 }, { "epoch": 11.28, "grad_norm": 36.91463088989258, "learning_rate": 6.266666666666668e-06, "loss": 3.3121, "step": 1128 }, { "epoch": 11.29, "grad_norm": 64.58856201171875, "learning_rate": 6.263333333333333e-06, "loss": 2.9645, "step": 1129 }, { "epoch": 11.3, "grad_norm": 49.40869140625, "learning_rate": 6.26e-06, "loss": 2.9231, "step": 1130 }, { "epoch": 11.31, "grad_norm": 63.96097183227539, "learning_rate": 6.256666666666667e-06, "loss": 3.1914, "step": 1131 }, { "epoch": 11.32, 
"grad_norm": 128.61270141601562, "learning_rate": 6.253333333333333e-06, "loss": 2.5026, "step": 1132 }, { "epoch": 11.33, "grad_norm": 41.810611724853516, "learning_rate": 6.25e-06, "loss": 2.875, "step": 1133 }, { "epoch": 11.34, "grad_norm": 74.16030883789062, "learning_rate": 6.246666666666667e-06, "loss": 3.146, "step": 1134 }, { "epoch": 11.35, "grad_norm": 58.263118743896484, "learning_rate": 6.243333333333333e-06, "loss": 3.0732, "step": 1135 }, { "epoch": 11.36, "grad_norm": 59.20132827758789, "learning_rate": 6.24e-06, "loss": 2.5571, "step": 1136 }, { "epoch": 11.37, "grad_norm": 58.28806686401367, "learning_rate": 6.236666666666667e-06, "loss": 3.0977, "step": 1137 }, { "epoch": 11.38, "grad_norm": 37.501861572265625, "learning_rate": 6.2333333333333335e-06, "loss": 2.5089, "step": 1138 }, { "epoch": 11.39, "grad_norm": 37.30469512939453, "learning_rate": 6.2300000000000005e-06, "loss": 3.0995, "step": 1139 }, { "epoch": 11.4, "grad_norm": 115.07567596435547, "learning_rate": 6.2266666666666675e-06, "loss": 3.268, "step": 1140 }, { "epoch": 11.41, "grad_norm": 79.97010803222656, "learning_rate": 6.223333333333334e-06, "loss": 3.1036, "step": 1141 }, { "epoch": 11.42, "grad_norm": 88.30361938476562, "learning_rate": 6.220000000000001e-06, "loss": 2.5054, "step": 1142 }, { "epoch": 11.43, "grad_norm": 74.10979461669922, "learning_rate": 6.2166666666666676e-06, "loss": 3.3303, "step": 1143 }, { "epoch": 11.44, "grad_norm": 92.78433227539062, "learning_rate": 6.213333333333334e-06, "loss": 2.9943, "step": 1144 }, { "epoch": 11.45, "grad_norm": 68.15824890136719, "learning_rate": 6.210000000000001e-06, "loss": 2.7237, "step": 1145 }, { "epoch": 11.46, "grad_norm": 43.9627799987793, "learning_rate": 6.206666666666668e-06, "loss": 2.3167, "step": 1146 }, { "epoch": 11.47, "grad_norm": 71.53556823730469, "learning_rate": 6.203333333333334e-06, "loss": 3.0123, "step": 1147 }, { "epoch": 11.48, "grad_norm": 39.018348693847656, "learning_rate": 
6.200000000000001e-06, "loss": 3.0739, "step": 1148 }, { "epoch": 11.49, "grad_norm": 60.233436584472656, "learning_rate": 6.196666666666668e-06, "loss": 2.5758, "step": 1149 }, { "epoch": 11.5, "grad_norm": 36.92890167236328, "learning_rate": 6.193333333333333e-06, "loss": 3.6797, "step": 1150 }, { "epoch": 11.5, "eval_loss": 2.933140754699707, "eval_map": 0.0034, "eval_map_50": 0.0079, "eval_map_75": 0.0025, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0003, "eval_map_medium": 0.0082, "eval_map_neckline": 0.0187, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1041, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0174, "eval_map_small": 0.0033, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.005, "eval_mar_10": 0.017, "eval_mar_100": 0.0224, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 
0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0571, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.4642, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.3957, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0169, "eval_mar_medium": 0.0385, "eval_mar_small": 0.0182, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.3018, "eval_samples_per_second": 5.464, "eval_steps_per_second": 1.366, "step": 1150 }, { "epoch": 11.51, "grad_norm": 49.18547058105469, "learning_rate": 6.190000000000001e-06, "loss": 3.8302, "step": 1151 }, { "epoch": 11.52, "grad_norm": 27.423803329467773, "learning_rate": 6.186666666666668e-06, "loss": 3.1434, "step": 1152 }, { "epoch": 11.53, "grad_norm": 75.67265319824219, "learning_rate": 6.183333333333333e-06, "loss": 2.5516, "step": 1153 }, { "epoch": 11.54, "grad_norm": 43.592124938964844, "learning_rate": 6.18e-06, "loss": 3.4308, "step": 1154 }, { "epoch": 11.55, "grad_norm": 53.793216705322266, "learning_rate": 6.176666666666667e-06, "loss": 2.9557, "step": 1155 }, { "epoch": 11.56, "grad_norm": 112.98760986328125, "learning_rate": 6.173333333333333e-06, "loss": 3.6208, "step": 1156 }, { "epoch": 11.57, "grad_norm": 112.87918090820312, "learning_rate": 6.17e-06, "loss": 3.0566, "step": 1157 }, { "epoch": 11.58, "grad_norm": 67.0530014038086, "learning_rate": 6.166666666666667e-06, "loss": 3.7063, "step": 
1158 }, { "epoch": 11.59, "grad_norm": 92.61679077148438, "learning_rate": 6.163333333333333e-06, "loss": 2.6202, "step": 1159 }, { "epoch": 11.6, "grad_norm": 51.205047607421875, "learning_rate": 6.16e-06, "loss": 2.8318, "step": 1160 }, { "epoch": 11.61, "grad_norm": 55.867164611816406, "learning_rate": 6.156666666666667e-06, "loss": 3.5799, "step": 1161 }, { "epoch": 11.62, "grad_norm": 92.4887924194336, "learning_rate": 6.1533333333333334e-06, "loss": 2.0232, "step": 1162 }, { "epoch": 11.63, "grad_norm": 54.29061508178711, "learning_rate": 6.15e-06, "loss": 2.5175, "step": 1163 }, { "epoch": 11.64, "grad_norm": 29.540096282958984, "learning_rate": 6.146666666666667e-06, "loss": 5.9645, "step": 1164 }, { "epoch": 11.65, "grad_norm": 52.80303955078125, "learning_rate": 6.1433333333333335e-06, "loss": 3.4338, "step": 1165 }, { "epoch": 11.66, "grad_norm": 61.18094253540039, "learning_rate": 6.1400000000000005e-06, "loss": 3.9263, "step": 1166 }, { "epoch": 11.67, "grad_norm": 138.59097290039062, "learning_rate": 6.1366666666666675e-06, "loss": 3.0852, "step": 1167 }, { "epoch": 11.68, "grad_norm": 46.001888275146484, "learning_rate": 6.133333333333334e-06, "loss": 2.8756, "step": 1168 }, { "epoch": 11.69, "grad_norm": 43.51438903808594, "learning_rate": 6.130000000000001e-06, "loss": 3.4274, "step": 1169 }, { "epoch": 11.7, "grad_norm": 42.65748977661133, "learning_rate": 6.126666666666668e-06, "loss": 3.1735, "step": 1170 }, { "epoch": 11.71, "grad_norm": 197.04246520996094, "learning_rate": 6.123333333333334e-06, "loss": 2.5916, "step": 1171 }, { "epoch": 11.72, "grad_norm": 62.736419677734375, "learning_rate": 6.120000000000001e-06, "loss": 3.5663, "step": 1172 }, { "epoch": 11.73, "grad_norm": 112.65035247802734, "learning_rate": 6.116666666666668e-06, "loss": 2.7746, "step": 1173 }, { "epoch": 11.74, "grad_norm": 75.29417419433594, "learning_rate": 6.113333333333333e-06, "loss": 3.2606, "step": 1174 }, { "epoch": 11.75, "grad_norm": 46.563720703125, 
"learning_rate": 6.110000000000001e-06, "loss": 3.2228, "step": 1175 }, { "epoch": 11.76, "grad_norm": 68.38220977783203, "learning_rate": 6.106666666666668e-06, "loss": 3.3296, "step": 1176 }, { "epoch": 11.77, "grad_norm": 77.16913604736328, "learning_rate": 6.103333333333333e-06, "loss": 3.0446, "step": 1177 }, { "epoch": 11.78, "grad_norm": 40.76362228393555, "learning_rate": 6.1e-06, "loss": 3.0458, "step": 1178 }, { "epoch": 11.79, "grad_norm": 57.107582092285156, "learning_rate": 6.096666666666667e-06, "loss": 2.8786, "step": 1179 }, { "epoch": 11.8, "grad_norm": 69.27469635009766, "learning_rate": 6.093333333333333e-06, "loss": 2.8258, "step": 1180 }, { "epoch": 11.81, "grad_norm": 104.06226348876953, "learning_rate": 6.09e-06, "loss": 3.3634, "step": 1181 }, { "epoch": 11.82, "grad_norm": 140.79476928710938, "learning_rate": 6.086666666666667e-06, "loss": 2.9085, "step": 1182 }, { "epoch": 11.83, "grad_norm": 76.3993911743164, "learning_rate": 6.083333333333333e-06, "loss": 2.7782, "step": 1183 }, { "epoch": 11.84, "grad_norm": 34.90277099609375, "learning_rate": 6.08e-06, "loss": 2.563, "step": 1184 }, { "epoch": 11.85, "grad_norm": 58.66377639770508, "learning_rate": 6.076666666666667e-06, "loss": 3.2514, "step": 1185 }, { "epoch": 11.86, "grad_norm": 177.22792053222656, "learning_rate": 6.073333333333333e-06, "loss": 2.3272, "step": 1186 }, { "epoch": 11.87, "grad_norm": 84.7278060913086, "learning_rate": 6.07e-06, "loss": 2.8488, "step": 1187 }, { "epoch": 11.88, "grad_norm": 61.57284927368164, "learning_rate": 6.066666666666667e-06, "loss": 3.2652, "step": 1188 }, { "epoch": 11.89, "grad_norm": 71.58474731445312, "learning_rate": 6.0633333333333334e-06, "loss": 2.7583, "step": 1189 }, { "epoch": 11.9, "grad_norm": 31.44135856628418, "learning_rate": 6.0600000000000004e-06, "loss": 3.0422, "step": 1190 }, { "epoch": 11.91, "grad_norm": 53.961334228515625, "learning_rate": 6.056666666666667e-06, "loss": 2.7036, "step": 1191 }, { "epoch": 11.92, 
"grad_norm": 40.71628952026367, "learning_rate": 6.0533333333333335e-06, "loss": 2.886, "step": 1192 }, { "epoch": 11.93, "grad_norm": 38.50099563598633, "learning_rate": 6.0500000000000005e-06, "loss": 2.6756, "step": 1193 }, { "epoch": 11.94, "grad_norm": 35.675289154052734, "learning_rate": 6.0466666666666675e-06, "loss": 3.6137, "step": 1194 }, { "epoch": 11.95, "grad_norm": 86.3695068359375, "learning_rate": 6.043333333333334e-06, "loss": 3.1999, "step": 1195 }, { "epoch": 11.96, "grad_norm": 96.34136962890625, "learning_rate": 6.040000000000001e-06, "loss": 2.5288, "step": 1196 }, { "epoch": 11.97, "grad_norm": 82.77781677246094, "learning_rate": 6.036666666666668e-06, "loss": 3.0529, "step": 1197 }, { "epoch": 11.98, "grad_norm": 101.15055847167969, "learning_rate": 6.033333333333335e-06, "loss": 2.3774, "step": 1198 }, { "epoch": 11.99, "grad_norm": 78.0596923828125, "learning_rate": 6.030000000000001e-06, "loss": 2.8738, "step": 1199 }, { "epoch": 12.0, "grad_norm": 44.033531188964844, "learning_rate": 6.026666666666668e-06, "loss": 3.6285, "step": 1200 }, { "epoch": 12.0, "eval_loss": 2.9317779541015625, "eval_map": 0.0034, "eval_map_50": 0.008, "eval_map_75": 0.0029, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0002, "eval_map_medium": 0.0082, "eval_map_neckline": 0.0165, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, 
"eval_map_shoe": 0.1084, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0161, "eval_map_small": 0.0036, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0051, "eval_mar_10": 0.018, "eval_mar_100": 0.0233, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0746, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5119, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.3678, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0165, "eval_mar_medium": 0.0396, "eval_mar_small": 0.0185, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.4934, "eval_samples_per_second": 5.407, "eval_steps_per_second": 1.352, "step": 1200 }, { "epoch": 12.01, "grad_norm": 38.9415283203125, "learning_rate": 6.023333333333335e-06, "loss": 3.6164, "step": 1201 }, { "epoch": 12.02, "grad_norm": 
56.324424743652344, "learning_rate": 6.02e-06, "loss": 2.2142, "step": 1202 }, { "epoch": 12.03, "grad_norm": 63.35586929321289, "learning_rate": 6.016666666666667e-06, "loss": 3.3842, "step": 1203 }, { "epoch": 12.04, "grad_norm": 90.83233642578125, "learning_rate": 6.013333333333335e-06, "loss": 2.9029, "step": 1204 }, { "epoch": 12.05, "grad_norm": 52.761138916015625, "learning_rate": 6.01e-06, "loss": 3.7216, "step": 1205 }, { "epoch": 12.06, "grad_norm": 93.8703842163086, "learning_rate": 6.006666666666667e-06, "loss": 2.5822, "step": 1206 }, { "epoch": 12.07, "grad_norm": 76.19731140136719, "learning_rate": 6.003333333333334e-06, "loss": 3.0485, "step": 1207 }, { "epoch": 12.08, "grad_norm": 104.82487487792969, "learning_rate": 6e-06, "loss": 3.3171, "step": 1208 }, { "epoch": 12.09, "grad_norm": 104.84453582763672, "learning_rate": 5.996666666666667e-06, "loss": 3.6784, "step": 1209 }, { "epoch": 12.1, "grad_norm": 43.317474365234375, "learning_rate": 5.993333333333334e-06, "loss": 3.4506, "step": 1210 }, { "epoch": 12.11, "grad_norm": 68.1817398071289, "learning_rate": 5.99e-06, "loss": 2.5716, "step": 1211 }, { "epoch": 12.12, "grad_norm": 100.78235626220703, "learning_rate": 5.986666666666667e-06, "loss": 3.4215, "step": 1212 }, { "epoch": 12.13, "grad_norm": 73.67121124267578, "learning_rate": 5.983333333333334e-06, "loss": 3.3682, "step": 1213 }, { "epoch": 12.14, "grad_norm": 54.88943862915039, "learning_rate": 5.98e-06, "loss": 2.6263, "step": 1214 }, { "epoch": 12.15, "grad_norm": 58.69312286376953, "learning_rate": 5.976666666666667e-06, "loss": 2.9164, "step": 1215 }, { "epoch": 12.16, "grad_norm": 36.30592727661133, "learning_rate": 5.973333333333334e-06, "loss": 3.5109, "step": 1216 }, { "epoch": 12.17, "grad_norm": 58.996429443359375, "learning_rate": 5.9700000000000004e-06, "loss": 2.9856, "step": 1217 }, { "epoch": 12.18, "grad_norm": 35.717552185058594, "learning_rate": 5.966666666666667e-06, "loss": 6.4484, "step": 1218 }, { "epoch": 12.19, 
"grad_norm": 36.69700622558594, "learning_rate": 5.963333333333334e-06, "loss": 2.5317, "step": 1219 }, { "epoch": 12.2, "grad_norm": 40.40568923950195, "learning_rate": 5.9600000000000005e-06, "loss": 2.9612, "step": 1220 }, { "epoch": 12.21, "grad_norm": 47.42739486694336, "learning_rate": 5.9566666666666675e-06, "loss": 3.2685, "step": 1221 }, { "epoch": 12.22, "grad_norm": 100.94508361816406, "learning_rate": 5.9533333333333345e-06, "loss": 3.4558, "step": 1222 }, { "epoch": 12.23, "grad_norm": 64.62818908691406, "learning_rate": 5.950000000000001e-06, "loss": 2.617, "step": 1223 }, { "epoch": 12.24, "grad_norm": 126.42255401611328, "learning_rate": 5.946666666666668e-06, "loss": 3.2282, "step": 1224 }, { "epoch": 12.25, "grad_norm": 74.44979858398438, "learning_rate": 5.943333333333335e-06, "loss": 2.5641, "step": 1225 }, { "epoch": 12.26, "grad_norm": 41.44639587402344, "learning_rate": 5.94e-06, "loss": 2.1707, "step": 1226 }, { "epoch": 12.27, "grad_norm": 54.795387268066406, "learning_rate": 5.936666666666667e-06, "loss": 3.1368, "step": 1227 }, { "epoch": 12.28, "grad_norm": 44.15462875366211, "learning_rate": 5.933333333333335e-06, "loss": 2.9674, "step": 1228 }, { "epoch": 12.29, "grad_norm": 81.4231185913086, "learning_rate": 5.93e-06, "loss": 3.5146, "step": 1229 }, { "epoch": 12.3, "grad_norm": 66.4217300415039, "learning_rate": 5.926666666666667e-06, "loss": 3.3714, "step": 1230 }, { "epoch": 12.31, "grad_norm": 71.2398452758789, "learning_rate": 5.923333333333334e-06, "loss": 3.1441, "step": 1231 }, { "epoch": 12.32, "grad_norm": 102.66434478759766, "learning_rate": 5.92e-06, "loss": 2.4226, "step": 1232 }, { "epoch": 12.33, "grad_norm": 106.76792907714844, "learning_rate": 5.916666666666667e-06, "loss": 3.5069, "step": 1233 }, { "epoch": 12.34, "grad_norm": 41.807647705078125, "learning_rate": 5.913333333333334e-06, "loss": 2.9436, "step": 1234 }, { "epoch": 12.35, "grad_norm": 81.01720428466797, "learning_rate": 5.91e-06, "loss": 2.4202, "step": 
1235 }, { "epoch": 12.36, "grad_norm": 69.07759094238281, "learning_rate": 5.906666666666667e-06, "loss": 3.4512, "step": 1236 }, { "epoch": 12.37, "grad_norm": 117.66728210449219, "learning_rate": 5.903333333333334e-06, "loss": 3.1618, "step": 1237 }, { "epoch": 12.38, "grad_norm": 41.70170974731445, "learning_rate": 5.9e-06, "loss": 3.1909, "step": 1238 }, { "epoch": 12.39, "grad_norm": 31.07749366760254, "learning_rate": 5.896666666666667e-06, "loss": 3.7667, "step": 1239 }, { "epoch": 12.4, "grad_norm": 88.17090606689453, "learning_rate": 5.893333333333334e-06, "loss": 4.684, "step": 1240 }, { "epoch": 12.41, "grad_norm": 50.30838394165039, "learning_rate": 5.89e-06, "loss": 2.8655, "step": 1241 }, { "epoch": 12.42, "grad_norm": 74.959716796875, "learning_rate": 5.886666666666667e-06, "loss": 3.0481, "step": 1242 }, { "epoch": 12.43, "grad_norm": 79.93954467773438, "learning_rate": 5.883333333333334e-06, "loss": 3.2333, "step": 1243 }, { "epoch": 12.44, "grad_norm": 63.89908218383789, "learning_rate": 5.8800000000000005e-06, "loss": 2.8055, "step": 1244 }, { "epoch": 12.45, "grad_norm": 53.9018440246582, "learning_rate": 5.8766666666666674e-06, "loss": 3.0191, "step": 1245 }, { "epoch": 12.46, "grad_norm": 346.02252197265625, "learning_rate": 5.873333333333334e-06, "loss": 3.2419, "step": 1246 }, { "epoch": 12.47, "grad_norm": 49.14729309082031, "learning_rate": 5.8700000000000005e-06, "loss": 2.5227, "step": 1247 }, { "epoch": 12.48, "grad_norm": 62.63964080810547, "learning_rate": 5.8666666666666675e-06, "loss": 2.8969, "step": 1248 }, { "epoch": 12.49, "grad_norm": 71.53133392333984, "learning_rate": 5.8633333333333345e-06, "loss": 2.1919, "step": 1249 }, { "epoch": 12.5, "grad_norm": 66.27953338623047, "learning_rate": 5.86e-06, "loss": 3.1858, "step": 1250 }, { "epoch": 12.5, "eval_loss": 2.9114532470703125, "eval_map": 0.0035, "eval_map_50": 0.0077, "eval_map_75": 0.0028, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, 
"eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0002, "eval_map_medium": 0.0073, "eval_map_neckline": 0.0106, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1128, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0193, "eval_map_small": 0.0039, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.005, "eval_mar_10": 0.0189, "eval_mar_100": 0.024, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0889, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.4896, 
"eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4043, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0202, "eval_mar_medium": 0.0404, "eval_mar_small": 0.0191, "eval_model_preparation_time": 0.0124, "eval_runtime": 19.3996, "eval_samples_per_second": 5.155, "eval_steps_per_second": 1.289, "step": 1250 }, { "epoch": 12.51, "grad_norm": 176.49539184570312, "learning_rate": 5.856666666666667e-06, "loss": 2.73, "step": 1251 }, { "epoch": 12.52, "grad_norm": 275.29644775390625, "learning_rate": 5.853333333333335e-06, "loss": 2.8748, "step": 1252 }, { "epoch": 12.53, "grad_norm": 127.10897064208984, "learning_rate": 5.85e-06, "loss": 3.1049, "step": 1253 }, { "epoch": 12.54, "grad_norm": 57.90776443481445, "learning_rate": 5.846666666666667e-06, "loss": 2.6313, "step": 1254 }, { "epoch": 12.55, "grad_norm": 102.28982543945312, "learning_rate": 5.843333333333334e-06, "loss": 2.1681, "step": 1255 }, { "epoch": 12.56, "grad_norm": 184.9008026123047, "learning_rate": 5.84e-06, "loss": 3.0815, "step": 1256 }, { "epoch": 12.57, "grad_norm": 153.85386657714844, "learning_rate": 5.836666666666667e-06, "loss": 2.9113, "step": 1257 }, { "epoch": 12.58, "grad_norm": 409.3955078125, "learning_rate": 5.833333333333334e-06, "loss": 3.6304, "step": 1258 }, { "epoch": 12.59, "grad_norm": 77.55615234375, "learning_rate": 5.83e-06, "loss": 3.0032, "step": 1259 }, { "epoch": 12.6, "grad_norm": 40.85903549194336, "learning_rate": 5.826666666666667e-06, "loss": 3.3199, "step": 1260 }, { "epoch": 12.61, "grad_norm": 63.750614166259766, "learning_rate": 5.823333333333334e-06, "loss": 2.8212, "step": 1261 }, { "epoch": 12.62, "grad_norm": 73.96086883544922, "learning_rate": 5.82e-06, "loss": 2.2397, "step": 1262 }, { "epoch": 12.63, "grad_norm": 
108.52780151367188, "learning_rate": 5.816666666666667e-06, "loss": 2.7212, "step": 1263 }, { "epoch": 12.64, "grad_norm": 44.90442657470703, "learning_rate": 5.813333333333334e-06, "loss": 2.8472, "step": 1264 }, { "epoch": 12.65, "grad_norm": 62.11616134643555, "learning_rate": 5.81e-06, "loss": 2.7104, "step": 1265 }, { "epoch": 12.66, "grad_norm": 45.02907180786133, "learning_rate": 5.806666666666667e-06, "loss": 3.2367, "step": 1266 }, { "epoch": 12.67, "grad_norm": 43.471771240234375, "learning_rate": 5.803333333333334e-06, "loss": 3.4676, "step": 1267 }, { "epoch": 12.68, "grad_norm": 313.18145751953125, "learning_rate": 5.8e-06, "loss": 3.1755, "step": 1268 }, { "epoch": 12.69, "grad_norm": 43.079959869384766, "learning_rate": 5.796666666666667e-06, "loss": 3.2447, "step": 1269 }, { "epoch": 12.7, "grad_norm": 34.68674087524414, "learning_rate": 5.793333333333334e-06, "loss": 3.5012, "step": 1270 }, { "epoch": 12.71, "grad_norm": 84.24726104736328, "learning_rate": 5.7900000000000005e-06, "loss": 2.698, "step": 1271 }, { "epoch": 12.72, "grad_norm": 48.009971618652344, "learning_rate": 5.7866666666666674e-06, "loss": 2.554, "step": 1272 }, { "epoch": 12.73, "grad_norm": 57.68301010131836, "learning_rate": 5.7833333333333344e-06, "loss": 3.1009, "step": 1273 }, { "epoch": 12.74, "grad_norm": 72.031494140625, "learning_rate": 5.78e-06, "loss": 3.5931, "step": 1274 }, { "epoch": 12.75, "grad_norm": 58.33479309082031, "learning_rate": 5.776666666666667e-06, "loss": 2.9124, "step": 1275 }, { "epoch": 12.76, "grad_norm": 73.76160430908203, "learning_rate": 5.7733333333333345e-06, "loss": 2.8278, "step": 1276 }, { "epoch": 12.77, "grad_norm": 107.28427124023438, "learning_rate": 5.77e-06, "loss": 2.2636, "step": 1277 }, { "epoch": 12.78, "grad_norm": 32.92682647705078, "learning_rate": 5.766666666666667e-06, "loss": 3.1635, "step": 1278 }, { "epoch": 12.79, "grad_norm": 78.51351165771484, "learning_rate": 5.763333333333334e-06, "loss": 2.8311, "step": 1279 }, { 
"epoch": 12.8, "grad_norm": 40.24061965942383, "learning_rate": 5.76e-06, "loss": 2.4459, "step": 1280 }, { "epoch": 12.81, "grad_norm": 46.80813980102539, "learning_rate": 5.756666666666667e-06, "loss": 3.2849, "step": 1281 }, { "epoch": 12.82, "grad_norm": 43.42023468017578, "learning_rate": 5.753333333333334e-06, "loss": 3.1956, "step": 1282 }, { "epoch": 12.83, "grad_norm": 48.31600570678711, "learning_rate": 5.75e-06, "loss": 2.8685, "step": 1283 }, { "epoch": 12.84, "grad_norm": 92.31568145751953, "learning_rate": 5.746666666666667e-06, "loss": 3.2687, "step": 1284 }, { "epoch": 12.85, "grad_norm": 34.4620475769043, "learning_rate": 5.743333333333334e-06, "loss": 3.3769, "step": 1285 }, { "epoch": 12.86, "grad_norm": 30.371427536010742, "learning_rate": 5.74e-06, "loss": 2.5358, "step": 1286 }, { "epoch": 12.87, "grad_norm": 187.1854248046875, "learning_rate": 5.736666666666667e-06, "loss": 3.1018, "step": 1287 }, { "epoch": 12.88, "grad_norm": 78.2849349975586, "learning_rate": 5.733333333333334e-06, "loss": 3.3206, "step": 1288 }, { "epoch": 12.89, "grad_norm": 50.985374450683594, "learning_rate": 5.73e-06, "loss": 3.5859, "step": 1289 }, { "epoch": 12.9, "grad_norm": 84.56413269042969, "learning_rate": 5.726666666666667e-06, "loss": 6.1009, "step": 1290 }, { "epoch": 12.91, "grad_norm": 119.13943481445312, "learning_rate": 5.723333333333334e-06, "loss": 3.1467, "step": 1291 }, { "epoch": 12.92, "grad_norm": 39.377498626708984, "learning_rate": 5.72e-06, "loss": 3.3134, "step": 1292 }, { "epoch": 12.93, "grad_norm": 221.7963104248047, "learning_rate": 5.716666666666667e-06, "loss": 3.336, "step": 1293 }, { "epoch": 12.94, "grad_norm": 56.01435852050781, "learning_rate": 5.713333333333334e-06, "loss": 3.0024, "step": 1294 }, { "epoch": 12.95, "grad_norm": 70.97488403320312, "learning_rate": 5.71e-06, "loss": 3.3312, "step": 1295 }, { "epoch": 12.96, "grad_norm": 61.824222564697266, "learning_rate": 5.706666666666667e-06, "loss": 2.7646, "step": 1296 }, { 
"epoch": 12.97, "grad_norm": 370.12750244140625, "learning_rate": 5.703333333333334e-06, "loss": 2.697, "step": 1297 }, { "epoch": 12.98, "grad_norm": 27.270084381103516, "learning_rate": 5.7e-06, "loss": 2.2781, "step": 1298 }, { "epoch": 12.99, "grad_norm": 38.875553131103516, "learning_rate": 5.696666666666667e-06, "loss": 2.814, "step": 1299 }, { "epoch": 13.0, "grad_norm": 30.18079948425293, "learning_rate": 5.6933333333333344e-06, "loss": 3.5477, "step": 1300 }, { "epoch": 13.0, "eval_loss": 2.973367214202881, "eval_map": 0.0043, "eval_map_50": 0.0093, "eval_map_75": 0.0037, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0002, "eval_map_medium": 0.0088, "eval_map_neckline": 0.0166, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1287, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0297, "eval_map_small": 0.0043, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0059, "eval_mar_10": 0.0175, "eval_mar_100": 0.0219, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, 
"eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0508, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.4813, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.3661, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0227, "eval_mar_medium": 0.0361, "eval_mar_small": 0.02, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.8424, "eval_samples_per_second": 5.307, "eval_steps_per_second": 1.327, "step": 1300 }, { "epoch": 13.01, "grad_norm": 48.18246841430664, "learning_rate": 5.69e-06, "loss": 2.8546, "step": 1301 }, { "epoch": 13.02, "grad_norm": 42.446781158447266, "learning_rate": 5.686666666666667e-06, "loss": 4.8444, "step": 1302 }, { "epoch": 13.03, "grad_norm": 59.65789031982422, "learning_rate": 5.683333333333334e-06, "loss": 2.7667, "step": 1303 }, { "epoch": 13.04, "grad_norm": 155.79747009277344, "learning_rate": 5.68e-06, "loss": 2.8347, "step": 1304 }, { "epoch": 13.05, "grad_norm": 52.55629348754883, "learning_rate": 5.676666666666667e-06, "loss": 3.3209, "step": 1305 }, { "epoch": 13.06, "grad_norm": 190.10366821289062, "learning_rate": 5.673333333333334e-06, "loss": 3.2607, "step": 1306 }, { "epoch": 13.07, "grad_norm": 316.22540283203125, 
"learning_rate": 5.67e-06, "loss": 2.6034, "step": 1307 }, { "epoch": 13.08, "grad_norm": 88.2117691040039, "learning_rate": 5.666666666666667e-06, "loss": 2.4783, "step": 1308 }, { "epoch": 13.09, "grad_norm": 100.9051284790039, "learning_rate": 5.663333333333334e-06, "loss": 3.0014, "step": 1309 }, { "epoch": 13.1, "grad_norm": 171.91290283203125, "learning_rate": 5.66e-06, "loss": 4.2731, "step": 1310 }, { "epoch": 13.11, "grad_norm": 56.72378921508789, "learning_rate": 5.656666666666667e-06, "loss": 3.2895, "step": 1311 }, { "epoch": 13.12, "grad_norm": 67.06344604492188, "learning_rate": 5.653333333333334e-06, "loss": 3.0893, "step": 1312 }, { "epoch": 13.13, "grad_norm": 99.89654541015625, "learning_rate": 5.65e-06, "loss": 2.2601, "step": 1313 }, { "epoch": 13.14, "grad_norm": 25.040376663208008, "learning_rate": 5.646666666666667e-06, "loss": 2.8311, "step": 1314 }, { "epoch": 13.15, "grad_norm": 49.77873992919922, "learning_rate": 5.643333333333334e-06, "loss": 2.5696, "step": 1315 }, { "epoch": 13.16, "grad_norm": 52.81875228881836, "learning_rate": 5.64e-06, "loss": 3.0054, "step": 1316 }, { "epoch": 13.17, "grad_norm": 52.56574249267578, "learning_rate": 5.636666666666667e-06, "loss": 3.0636, "step": 1317 }, { "epoch": 13.18, "grad_norm": 316.5511779785156, "learning_rate": 5.633333333333334e-06, "loss": 2.7183, "step": 1318 }, { "epoch": 13.19, "grad_norm": 48.97709274291992, "learning_rate": 5.63e-06, "loss": 3.1548, "step": 1319 }, { "epoch": 13.2, "grad_norm": 108.84138488769531, "learning_rate": 5.626666666666667e-06, "loss": 3.1972, "step": 1320 }, { "epoch": 13.21, "grad_norm": 286.5662536621094, "learning_rate": 5.623333333333334e-06, "loss": 2.8496, "step": 1321 }, { "epoch": 13.22, "grad_norm": 53.888404846191406, "learning_rate": 5.620000000000001e-06, "loss": 2.7828, "step": 1322 }, { "epoch": 13.23, "grad_norm": 48.89422607421875, "learning_rate": 5.6166666666666665e-06, "loss": 2.659, "step": 1323 }, { "epoch": 13.24, "grad_norm": 
54.42698287963867, "learning_rate": 5.613333333333334e-06, "loss": 3.4533, "step": 1324 }, { "epoch": 13.25, "grad_norm": 62.72694396972656, "learning_rate": 5.610000000000001e-06, "loss": 2.9667, "step": 1325 }, { "epoch": 13.26, "grad_norm": 129.79527282714844, "learning_rate": 5.606666666666667e-06, "loss": 3.2245, "step": 1326 }, { "epoch": 13.27, "grad_norm": 43.31553649902344, "learning_rate": 5.603333333333334e-06, "loss": 3.113, "step": 1327 }, { "epoch": 13.28, "grad_norm": 62.160648345947266, "learning_rate": 5.600000000000001e-06, "loss": 2.7984, "step": 1328 }, { "epoch": 13.29, "grad_norm": 49.760929107666016, "learning_rate": 5.596666666666667e-06, "loss": 3.1106, "step": 1329 }, { "epoch": 13.3, "grad_norm": 319.26580810546875, "learning_rate": 5.593333333333334e-06, "loss": 2.3259, "step": 1330 }, { "epoch": 13.31, "grad_norm": 77.77149200439453, "learning_rate": 5.590000000000001e-06, "loss": 3.8361, "step": 1331 }, { "epoch": 13.32, "grad_norm": 138.55030822753906, "learning_rate": 5.586666666666667e-06, "loss": 6.4439, "step": 1332 }, { "epoch": 13.33, "grad_norm": 74.3822250366211, "learning_rate": 5.583333333333334e-06, "loss": 2.8177, "step": 1333 }, { "epoch": 13.34, "grad_norm": 42.74884796142578, "learning_rate": 5.580000000000001e-06, "loss": 3.3244, "step": 1334 }, { "epoch": 13.35, "grad_norm": 90.61199188232422, "learning_rate": 5.576666666666667e-06, "loss": 3.5117, "step": 1335 }, { "epoch": 13.36, "grad_norm": 38.648902893066406, "learning_rate": 5.573333333333334e-06, "loss": 3.7493, "step": 1336 }, { "epoch": 13.37, "grad_norm": 62.47495651245117, "learning_rate": 5.570000000000001e-06, "loss": 2.879, "step": 1337 }, { "epoch": 13.38, "grad_norm": 74.44377136230469, "learning_rate": 5.566666666666667e-06, "loss": 2.7877, "step": 1338 }, { "epoch": 13.39, "grad_norm": 41.57347106933594, "learning_rate": 5.563333333333334e-06, "loss": 2.619, "step": 1339 }, { "epoch": 13.4, "grad_norm": 68.55675506591797, "learning_rate": 
5.560000000000001e-06, "loss": 2.5491, "step": 1340 }, { "epoch": 13.41, "grad_norm": 41.79167175292969, "learning_rate": 5.556666666666667e-06, "loss": 2.8779, "step": 1341 }, { "epoch": 13.42, "grad_norm": 161.9223175048828, "learning_rate": 5.553333333333334e-06, "loss": 3.0409, "step": 1342 }, { "epoch": 13.43, "grad_norm": 67.85066223144531, "learning_rate": 5.550000000000001e-06, "loss": 3.2136, "step": 1343 }, { "epoch": 13.44, "grad_norm": 42.915382385253906, "learning_rate": 5.546666666666667e-06, "loss": 2.6769, "step": 1344 }, { "epoch": 13.45, "grad_norm": 48.47554397583008, "learning_rate": 5.543333333333334e-06, "loss": 2.9957, "step": 1345 }, { "epoch": 13.46, "grad_norm": 69.20629119873047, "learning_rate": 5.540000000000001e-06, "loss": 3.2933, "step": 1346 }, { "epoch": 13.47, "grad_norm": 264.0637512207031, "learning_rate": 5.5366666666666665e-06, "loss": 3.2189, "step": 1347 }, { "epoch": 13.48, "grad_norm": 76.55482482910156, "learning_rate": 5.533333333333334e-06, "loss": 2.8759, "step": 1348 }, { "epoch": 13.49, "grad_norm": 86.43773651123047, "learning_rate": 5.530000000000001e-06, "loss": 3.6483, "step": 1349 }, { "epoch": 13.5, "grad_norm": 78.87715148925781, "learning_rate": 5.5266666666666666e-06, "loss": 2.6609, "step": 1350 }, { "epoch": 13.5, "eval_loss": 2.8948066234588623, "eval_map": 0.0038, "eval_map_50": 0.009, "eval_map_75": 0.0029, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0003, "eval_map_medium": 0.0076, "eval_map_neckline": 0.0141, 
"eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1142, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0259, "eval_map_small": 0.0039, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0057, "eval_mar_10": 0.0191, "eval_mar_100": 0.0242, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0857, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.4993, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.407, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0236, "eval_mar_medium": 0.0397, "eval_mar_small": 0.0204, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.1915, "eval_samples_per_second": 5.497, 
"eval_steps_per_second": 1.374, "step": 1350 }, { "epoch": 13.51, "grad_norm": 43.900516510009766, "learning_rate": 5.5233333333333335e-06, "loss": 3.6675, "step": 1351 }, { "epoch": 13.52, "grad_norm": 55.29404830932617, "learning_rate": 5.5200000000000005e-06, "loss": 3.0232, "step": 1352 }, { "epoch": 13.53, "grad_norm": 196.89259338378906, "learning_rate": 5.516666666666667e-06, "loss": 3.3979, "step": 1353 }, { "epoch": 13.54, "grad_norm": 148.28892517089844, "learning_rate": 5.513333333333334e-06, "loss": 3.4098, "step": 1354 }, { "epoch": 13.55, "grad_norm": 54.56965255737305, "learning_rate": 5.510000000000001e-06, "loss": 2.9445, "step": 1355 }, { "epoch": 13.56, "grad_norm": 45.880271911621094, "learning_rate": 5.506666666666667e-06, "loss": 3.0333, "step": 1356 }, { "epoch": 13.57, "grad_norm": 43.24173355102539, "learning_rate": 5.503333333333334e-06, "loss": 3.5379, "step": 1357 }, { "epoch": 13.58, "grad_norm": 57.10380172729492, "learning_rate": 5.500000000000001e-06, "loss": 3.3377, "step": 1358 }, { "epoch": 13.59, "grad_norm": 61.830078125, "learning_rate": 5.496666666666667e-06, "loss": 3.085, "step": 1359 }, { "epoch": 13.6, "grad_norm": 44.17611312866211, "learning_rate": 5.493333333333334e-06, "loss": 2.9583, "step": 1360 }, { "epoch": 13.61, "grad_norm": 122.07967376708984, "learning_rate": 5.490000000000001e-06, "loss": 3.0605, "step": 1361 }, { "epoch": 13.62, "grad_norm": 128.71766662597656, "learning_rate": 5.486666666666667e-06, "loss": 3.3492, "step": 1362 }, { "epoch": 13.63, "grad_norm": 147.9226531982422, "learning_rate": 5.483333333333334e-06, "loss": 3.4507, "step": 1363 }, { "epoch": 13.64, "grad_norm": 160.2130889892578, "learning_rate": 5.480000000000001e-06, "loss": 2.7243, "step": 1364 }, { "epoch": 13.65, "grad_norm": 42.21689224243164, "learning_rate": 5.476666666666667e-06, "loss": 3.0663, "step": 1365 }, { "epoch": 13.66, "grad_norm": 61.759033203125, "learning_rate": 5.473333333333334e-06, "loss": 3.5872, "step": 1366 }, 
{ "epoch": 13.67, "grad_norm": 50.204307556152344, "learning_rate": 5.470000000000001e-06, "loss": 3.0159, "step": 1367 }, { "epoch": 13.68, "grad_norm": 50.05724334716797, "learning_rate": 5.466666666666667e-06, "loss": 3.2128, "step": 1368 }, { "epoch": 13.69, "grad_norm": 46.2878532409668, "learning_rate": 5.463333333333334e-06, "loss": 3.0355, "step": 1369 }, { "epoch": 13.7, "grad_norm": 75.67896270751953, "learning_rate": 5.460000000000001e-06, "loss": 3.0937, "step": 1370 }, { "epoch": 13.71, "grad_norm": 76.05064392089844, "learning_rate": 5.456666666666666e-06, "loss": 2.9304, "step": 1371 }, { "epoch": 13.72, "grad_norm": 63.84006118774414, "learning_rate": 5.453333333333334e-06, "loss": 3.2631, "step": 1372 }, { "epoch": 13.73, "grad_norm": 44.3204345703125, "learning_rate": 5.450000000000001e-06, "loss": 3.0586, "step": 1373 }, { "epoch": 13.74, "grad_norm": 54.02206039428711, "learning_rate": 5.4466666666666665e-06, "loss": 3.3844, "step": 1374 }, { "epoch": 13.75, "grad_norm": 45.49407196044922, "learning_rate": 5.4433333333333335e-06, "loss": 3.266, "step": 1375 }, { "epoch": 13.76, "grad_norm": 40.1444091796875, "learning_rate": 5.4400000000000004e-06, "loss": 2.2307, "step": 1376 }, { "epoch": 13.77, "grad_norm": 42.34784698486328, "learning_rate": 5.4366666666666666e-06, "loss": 3.2731, "step": 1377 }, { "epoch": 13.78, "grad_norm": 38.807456970214844, "learning_rate": 5.4333333333333335e-06, "loss": 2.5826, "step": 1378 }, { "epoch": 13.79, "grad_norm": 39.02259826660156, "learning_rate": 5.4300000000000005e-06, "loss": 3.4358, "step": 1379 }, { "epoch": 13.8, "grad_norm": 100.69796752929688, "learning_rate": 5.426666666666667e-06, "loss": 2.9347, "step": 1380 }, { "epoch": 13.81, "grad_norm": 86.63890075683594, "learning_rate": 5.423333333333334e-06, "loss": 3.1614, "step": 1381 }, { "epoch": 13.82, "grad_norm": 43.10780715942383, "learning_rate": 5.420000000000001e-06, "loss": 3.6413, "step": 1382 }, { "epoch": 13.83, "grad_norm": 
37.24336624145508, "learning_rate": 5.416666666666667e-06, "loss": 2.4379, "step": 1383 }, { "epoch": 13.84, "grad_norm": 87.45013427734375, "learning_rate": 5.413333333333334e-06, "loss": 2.8013, "step": 1384 }, { "epoch": 13.85, "grad_norm": 95.55387878417969, "learning_rate": 5.410000000000001e-06, "loss": 3.5745, "step": 1385 }, { "epoch": 13.86, "grad_norm": 33.141441345214844, "learning_rate": 5.406666666666667e-06, "loss": 6.0909, "step": 1386 }, { "epoch": 13.87, "grad_norm": 42.16377258300781, "learning_rate": 5.403333333333334e-06, "loss": 2.5047, "step": 1387 }, { "epoch": 13.88, "grad_norm": 94.50807189941406, "learning_rate": 5.400000000000001e-06, "loss": 2.8146, "step": 1388 }, { "epoch": 13.89, "grad_norm": 59.753929138183594, "learning_rate": 5.396666666666667e-06, "loss": 2.7874, "step": 1389 }, { "epoch": 13.9, "grad_norm": 131.13893127441406, "learning_rate": 5.393333333333334e-06, "loss": 2.0725, "step": 1390 }, { "epoch": 13.91, "grad_norm": 106.40316009521484, "learning_rate": 5.390000000000001e-06, "loss": 3.1835, "step": 1391 }, { "epoch": 13.92, "grad_norm": 45.83256530761719, "learning_rate": 5.386666666666667e-06, "loss": 3.4924, "step": 1392 }, { "epoch": 13.93, "grad_norm": 65.98099517822266, "learning_rate": 5.383333333333334e-06, "loss": 3.1408, "step": 1393 }, { "epoch": 13.94, "grad_norm": 118.89909362792969, "learning_rate": 5.380000000000001e-06, "loss": 2.938, "step": 1394 }, { "epoch": 13.95, "grad_norm": 172.55821228027344, "learning_rate": 5.376666666666666e-06, "loss": 2.4652, "step": 1395 }, { "epoch": 13.96, "grad_norm": 83.14400482177734, "learning_rate": 5.373333333333334e-06, "loss": 2.5724, "step": 1396 }, { "epoch": 13.97, "grad_norm": 69.05254364013672, "learning_rate": 5.370000000000001e-06, "loss": 2.6006, "step": 1397 }, { "epoch": 13.98, "grad_norm": 46.84837341308594, "learning_rate": 5.366666666666666e-06, "loss": 2.9174, "step": 1398 }, { "epoch": 13.99, "grad_norm": 79.0826187133789, "learning_rate": 
5.363333333333333e-06, "loss": 3.4484, "step": 1399 }, { "epoch": 14.0, "grad_norm": 93.87140655517578, "learning_rate": 5.36e-06, "loss": 2.1238, "step": 1400 }, { "epoch": 14.0, "eval_loss": 2.859769344329834, "eval_map": 0.004, "eval_map_50": 0.0097, "eval_map_75": 0.003, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0153, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0009, "eval_map_medium": 0.0069, "eval_map_neckline": 0.0135, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1103, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0249, "eval_map_small": 0.0039, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.006, "eval_mar_10": 0.0198, "eval_mar_100": 0.0245, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0184, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, 
"eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0841, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5164, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.3843, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0224, "eval_mar_medium": 0.0401, "eval_mar_small": 0.021, "eval_model_preparation_time": 0.0124, "eval_runtime": 19.1672, "eval_samples_per_second": 5.217, "eval_steps_per_second": 1.304, "step": 1400 }, { "epoch": 14.01, "grad_norm": 45.061012268066406, "learning_rate": 5.3566666666666665e-06, "loss": 2.8865, "step": 1401 }, { "epoch": 14.02, "grad_norm": 89.77491760253906, "learning_rate": 5.3533333333333335e-06, "loss": 3.7054, "step": 1402 }, { "epoch": 14.03, "grad_norm": 46.908897399902344, "learning_rate": 5.3500000000000004e-06, "loss": 3.2111, "step": 1403 }, { "epoch": 14.04, "grad_norm": 40.92363357543945, "learning_rate": 5.346666666666667e-06, "loss": 3.1497, "step": 1404 }, { "epoch": 14.05, "grad_norm": 308.8420104980469, "learning_rate": 5.3433333333333336e-06, "loss": 3.7163, "step": 1405 }, { "epoch": 14.06, "grad_norm": 66.76055145263672, "learning_rate": 5.3400000000000005e-06, "loss": 3.7526, "step": 1406 }, { "epoch": 14.07, "grad_norm": 29.71755599975586, "learning_rate": 5.336666666666667e-06, "loss": 2.9497, "step": 1407 }, { "epoch": 14.08, "grad_norm": 45.77332305908203, "learning_rate": 5.333333333333334e-06, "loss": 2.6598, "step": 1408 }, { "epoch": 14.09, "grad_norm": 60.22394561767578, "learning_rate": 
5.330000000000001e-06, "loss": 2.522, "step": 1409 }, { "epoch": 14.1, "grad_norm": 47.34371566772461, "learning_rate": 5.326666666666667e-06, "loss": 3.2899, "step": 1410 }, { "epoch": 14.11, "grad_norm": 205.67225646972656, "learning_rate": 5.323333333333334e-06, "loss": 3.1807, "step": 1411 }, { "epoch": 14.12, "grad_norm": 82.37860107421875, "learning_rate": 5.320000000000001e-06, "loss": 2.996, "step": 1412 }, { "epoch": 14.13, "grad_norm": 42.44843292236328, "learning_rate": 5.316666666666667e-06, "loss": 2.6482, "step": 1413 }, { "epoch": 14.14, "grad_norm": 90.32747650146484, "learning_rate": 5.313333333333334e-06, "loss": 3.6561, "step": 1414 }, { "epoch": 14.15, "grad_norm": 51.108028411865234, "learning_rate": 5.310000000000001e-06, "loss": 2.7528, "step": 1415 }, { "epoch": 14.16, "grad_norm": 66.20177459716797, "learning_rate": 5.306666666666667e-06, "loss": 2.7324, "step": 1416 }, { "epoch": 14.17, "grad_norm": 189.49000549316406, "learning_rate": 5.303333333333334e-06, "loss": 3.1081, "step": 1417 }, { "epoch": 14.18, "grad_norm": 119.9267578125, "learning_rate": 5.300000000000001e-06, "loss": 3.2367, "step": 1418 }, { "epoch": 14.19, "grad_norm": 73.49552917480469, "learning_rate": 5.296666666666666e-06, "loss": 2.2438, "step": 1419 }, { "epoch": 14.2, "grad_norm": 52.107765197753906, "learning_rate": 5.293333333333334e-06, "loss": 2.1614, "step": 1420 }, { "epoch": 14.21, "grad_norm": 129.86985778808594, "learning_rate": 5.290000000000001e-06, "loss": 2.9811, "step": 1421 }, { "epoch": 14.22, "grad_norm": 58.22233581542969, "learning_rate": 5.286666666666666e-06, "loss": 2.8661, "step": 1422 }, { "epoch": 14.23, "grad_norm": 55.36123275756836, "learning_rate": 5.283333333333333e-06, "loss": 2.6876, "step": 1423 }, { "epoch": 14.24, "grad_norm": 70.67570495605469, "learning_rate": 5.28e-06, "loss": 2.8213, "step": 1424 }, { "epoch": 14.25, "grad_norm": 111.58439636230469, "learning_rate": 5.276666666666666e-06, "loss": 3.9134, "step": 1425 }, { 
"epoch": 14.26, "grad_norm": 62.36130142211914, "learning_rate": 5.273333333333333e-06, "loss": 3.3966, "step": 1426 }, { "epoch": 14.27, "grad_norm": 43.80769729614258, "learning_rate": 5.27e-06, "loss": 3.061, "step": 1427 }, { "epoch": 14.28, "grad_norm": 36.55092239379883, "learning_rate": 5.2666666666666665e-06, "loss": 2.7649, "step": 1428 }, { "epoch": 14.29, "grad_norm": 55.49384689331055, "learning_rate": 5.2633333333333335e-06, "loss": 2.5435, "step": 1429 }, { "epoch": 14.3, "grad_norm": 116.52753448486328, "learning_rate": 5.2600000000000005e-06, "loss": 2.6393, "step": 1430 }, { "epoch": 14.31, "grad_norm": 92.88204956054688, "learning_rate": 5.256666666666667e-06, "loss": 3.308, "step": 1431 }, { "epoch": 14.32, "grad_norm": 47.4485969543457, "learning_rate": 5.2533333333333336e-06, "loss": 2.4957, "step": 1432 }, { "epoch": 14.33, "grad_norm": 48.91594696044922, "learning_rate": 5.2500000000000006e-06, "loss": 2.4668, "step": 1433 }, { "epoch": 14.34, "grad_norm": 108.74813842773438, "learning_rate": 5.246666666666667e-06, "loss": 3.4532, "step": 1434 }, { "epoch": 14.35, "grad_norm": 82.996337890625, "learning_rate": 5.243333333333334e-06, "loss": 3.397, "step": 1435 }, { "epoch": 14.36, "grad_norm": 149.76141357421875, "learning_rate": 5.240000000000001e-06, "loss": 2.8024, "step": 1436 }, { "epoch": 14.37, "grad_norm": 40.510345458984375, "learning_rate": 5.236666666666667e-06, "loss": 6.0509, "step": 1437 }, { "epoch": 14.38, "grad_norm": 44.31604766845703, "learning_rate": 5.233333333333334e-06, "loss": 2.8477, "step": 1438 }, { "epoch": 14.39, "grad_norm": 42.713565826416016, "learning_rate": 5.230000000000001e-06, "loss": 2.8731, "step": 1439 }, { "epoch": 14.4, "grad_norm": 36.788902282714844, "learning_rate": 5.226666666666667e-06, "loss": 2.7961, "step": 1440 }, { "epoch": 14.41, "grad_norm": 68.90104675292969, "learning_rate": 5.223333333333334e-06, "loss": 2.4211, "step": 1441 }, { "epoch": 14.42, "grad_norm": 50.60667037963867, 
"learning_rate": 5.220000000000001e-06, "loss": 3.0651, "step": 1442 }, { "epoch": 14.43, "grad_norm": 117.1318130493164, "learning_rate": 5.216666666666666e-06, "loss": 3.2888, "step": 1443 }, { "epoch": 14.44, "grad_norm": 67.90980529785156, "learning_rate": 5.213333333333334e-06, "loss": 3.2274, "step": 1444 }, { "epoch": 14.45, "grad_norm": 30.92552375793457, "learning_rate": 5.210000000000001e-06, "loss": 3.0149, "step": 1445 }, { "epoch": 14.46, "grad_norm": 43.25968933105469, "learning_rate": 5.206666666666668e-06, "loss": 2.331, "step": 1446 }, { "epoch": 14.47, "grad_norm": 204.7000274658203, "learning_rate": 5.203333333333333e-06, "loss": 2.6598, "step": 1447 }, { "epoch": 14.48, "grad_norm": 61.47247314453125, "learning_rate": 5.2e-06, "loss": 3.0296, "step": 1448 }, { "epoch": 14.49, "grad_norm": 69.3780517578125, "learning_rate": 5.196666666666668e-06, "loss": 2.8727, "step": 1449 }, { "epoch": 14.5, "grad_norm": 67.90682983398438, "learning_rate": 5.193333333333333e-06, "loss": 2.5572, "step": 1450 }, { "epoch": 14.5, "eval_loss": 2.825093984603882, "eval_map": 0.0039, "eval_map_50": 0.0093, "eval_map_75": 0.0026, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0119, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0008, "eval_map_medium": 0.007, "eval_map_neckline": 0.0099, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1148, "eval_map_shorts": 0.0, 
"eval_map_skirt": 0.0, "eval_map_sleeve": 0.0239, "eval_map_small": 0.0039, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0054, "eval_mar_10": 0.0201, "eval_mar_100": 0.025, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0082, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1127, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5067, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.3957, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0198, "eval_mar_medium": 0.0402, "eval_mar_small": 0.0225, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.4897, "eval_samples_per_second": 5.408, "eval_steps_per_second": 1.352, "step": 1450 }, { "epoch": 14.51, "grad_norm": 78.81498718261719, "learning_rate": 5.19e-06, "loss": 3.3871, "step": 1451 }, { "epoch": 14.52, "grad_norm": 176.85829162597656, "learning_rate": 
5.186666666666667e-06, "loss": 3.5795, "step": 1452 }, { "epoch": 14.53, "grad_norm": 38.48626708984375, "learning_rate": 5.183333333333333e-06, "loss": 3.0647, "step": 1453 }, { "epoch": 14.54, "grad_norm": 170.8521728515625, "learning_rate": 5.18e-06, "loss": 2.254, "step": 1454 }, { "epoch": 14.55, "grad_norm": 45.00108337402344, "learning_rate": 5.176666666666667e-06, "loss": 3.3339, "step": 1455 }, { "epoch": 14.56, "grad_norm": 160.67111206054688, "learning_rate": 5.1733333333333335e-06, "loss": 2.4485, "step": 1456 }, { "epoch": 14.57, "grad_norm": 75.18513488769531, "learning_rate": 5.1700000000000005e-06, "loss": 2.5632, "step": 1457 }, { "epoch": 14.58, "grad_norm": 47.10391616821289, "learning_rate": 5.1666666666666675e-06, "loss": 3.3297, "step": 1458 }, { "epoch": 14.59, "grad_norm": 56.983612060546875, "learning_rate": 5.163333333333334e-06, "loss": 2.1494, "step": 1459 }, { "epoch": 14.6, "grad_norm": 179.07730102539062, "learning_rate": 5.1600000000000006e-06, "loss": 3.4216, "step": 1460 }, { "epoch": 14.61, "grad_norm": 32.935489654541016, "learning_rate": 5.1566666666666676e-06, "loss": 3.15, "step": 1461 }, { "epoch": 14.62, "grad_norm": 34.05495834350586, "learning_rate": 5.153333333333334e-06, "loss": 3.0666, "step": 1462 }, { "epoch": 14.63, "grad_norm": 109.51412963867188, "learning_rate": 5.150000000000001e-06, "loss": 2.577, "step": 1463 }, { "epoch": 14.64, "grad_norm": 33.185760498046875, "learning_rate": 5.146666666666668e-06, "loss": 2.6635, "step": 1464 }, { "epoch": 14.65, "grad_norm": 63.0116081237793, "learning_rate": 5.143333333333334e-06, "loss": 2.3523, "step": 1465 }, { "epoch": 14.66, "grad_norm": 46.94235610961914, "learning_rate": 5.140000000000001e-06, "loss": 2.5266, "step": 1466 }, { "epoch": 14.67, "grad_norm": 40.63290786743164, "learning_rate": 5.136666666666668e-06, "loss": 5.9778, "step": 1467 }, { "epoch": 14.68, "grad_norm": 36.507713317871094, "learning_rate": 5.133333333333334e-06, "loss": 2.4377, "step": 1468 }, 
{ "epoch": 14.69, "grad_norm": 108.11258697509766, "learning_rate": 5.130000000000001e-06, "loss": 2.4668, "step": 1469 }, { "epoch": 14.7, "grad_norm": 50.60160827636719, "learning_rate": 5.126666666666668e-06, "loss": 2.2897, "step": 1470 }, { "epoch": 14.71, "grad_norm": 76.94940185546875, "learning_rate": 5.123333333333333e-06, "loss": 2.9826, "step": 1471 }, { "epoch": 14.72, "grad_norm": 29.218334197998047, "learning_rate": 5.12e-06, "loss": 3.3313, "step": 1472 }, { "epoch": 14.73, "grad_norm": 69.14483642578125, "learning_rate": 5.116666666666668e-06, "loss": 2.5596, "step": 1473 }, { "epoch": 14.74, "grad_norm": 80.5952377319336, "learning_rate": 5.113333333333333e-06, "loss": 3.5273, "step": 1474 }, { "epoch": 14.75, "grad_norm": 43.595680236816406, "learning_rate": 5.11e-06, "loss": 3.2832, "step": 1475 }, { "epoch": 14.76, "grad_norm": 75.0948257446289, "learning_rate": 5.106666666666667e-06, "loss": 2.6479, "step": 1476 }, { "epoch": 14.77, "grad_norm": 84.48950958251953, "learning_rate": 5.103333333333333e-06, "loss": 2.7604, "step": 1477 }, { "epoch": 14.78, "grad_norm": 109.54187774658203, "learning_rate": 5.1e-06, "loss": 3.0162, "step": 1478 }, { "epoch": 14.79, "grad_norm": 61.3588752746582, "learning_rate": 5.096666666666667e-06, "loss": 3.2318, "step": 1479 }, { "epoch": 14.8, "grad_norm": 96.90011596679688, "learning_rate": 5.093333333333333e-06, "loss": 3.0823, "step": 1480 }, { "epoch": 14.81, "grad_norm": 43.31819534301758, "learning_rate": 5.09e-06, "loss": 3.6146, "step": 1481 }, { "epoch": 14.82, "grad_norm": 112.53628540039062, "learning_rate": 5.086666666666667e-06, "loss": 4.1114, "step": 1482 }, { "epoch": 14.83, "grad_norm": 79.32304382324219, "learning_rate": 5.0833333333333335e-06, "loss": 3.4108, "step": 1483 }, { "epoch": 14.84, "grad_norm": 88.50263977050781, "learning_rate": 5.0800000000000005e-06, "loss": 3.1952, "step": 1484 }, { "epoch": 14.85, "grad_norm": 68.44635772705078, "learning_rate": 5.0766666666666675e-06, "loss": 
2.7247, "step": 1485 }, { "epoch": 14.86, "grad_norm": 39.98576354980469, "learning_rate": 5.073333333333334e-06, "loss": 3.1243, "step": 1486 }, { "epoch": 14.87, "grad_norm": 62.27031326293945, "learning_rate": 5.070000000000001e-06, "loss": 2.7504, "step": 1487 }, { "epoch": 14.88, "grad_norm": 82.09013366699219, "learning_rate": 5.0666666666666676e-06, "loss": 2.3922, "step": 1488 }, { "epoch": 14.89, "grad_norm": 107.42977905273438, "learning_rate": 5.063333333333334e-06, "loss": 2.8943, "step": 1489 }, { "epoch": 14.9, "grad_norm": 196.55770874023438, "learning_rate": 5.060000000000001e-06, "loss": 3.6099, "step": 1490 }, { "epoch": 14.91, "grad_norm": 38.78766632080078, "learning_rate": 5.056666666666668e-06, "loss": 2.9987, "step": 1491 }, { "epoch": 14.92, "grad_norm": 46.68333435058594, "learning_rate": 5.053333333333334e-06, "loss": 3.4622, "step": 1492 }, { "epoch": 14.93, "grad_norm": 69.1555404663086, "learning_rate": 5.050000000000001e-06, "loss": 2.8679, "step": 1493 }, { "epoch": 14.94, "grad_norm": 359.9154968261719, "learning_rate": 5.046666666666668e-06, "loss": 2.9657, "step": 1494 }, { "epoch": 14.95, "grad_norm": 46.2937126159668, "learning_rate": 5.043333333333333e-06, "loss": 3.1733, "step": 1495 }, { "epoch": 14.96, "grad_norm": 108.9802017211914, "learning_rate": 5.04e-06, "loss": 3.1858, "step": 1496 }, { "epoch": 14.97, "grad_norm": 152.80203247070312, "learning_rate": 5.036666666666668e-06, "loss": 3.0876, "step": 1497 }, { "epoch": 14.98, "grad_norm": 151.9467010498047, "learning_rate": 5.033333333333333e-06, "loss": 2.632, "step": 1498 }, { "epoch": 14.99, "grad_norm": 81.38361358642578, "learning_rate": 5.03e-06, "loss": 2.9349, "step": 1499 }, { "epoch": 15.0, "grad_norm": 143.7174072265625, "learning_rate": 5.026666666666667e-06, "loss": 2.6484, "step": 1500 }, { "epoch": 15.0, "eval_loss": 2.824601173400879, "eval_map": 0.0039, "eval_map_50": 0.0093, "eval_map_75": 0.003, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, 
"eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0059, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0005, "eval_map_medium": 0.0074, "eval_map_neckline": 0.0055, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1223, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0249, "eval_map_small": 0.004, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0057, "eval_mar_10": 0.0196, "eval_mar_100": 0.0244, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0082, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.0778, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, 
"eval_mar_100_shoe": 0.5157, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.3974, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0235, "eval_mar_medium": 0.0387, "eval_mar_small": 0.022, "eval_model_preparation_time": 0.0124, "eval_runtime": 19.0609, "eval_samples_per_second": 5.246, "eval_steps_per_second": 1.312, "step": 1500 }, { "epoch": 15.01, "grad_norm": 89.34796905517578, "learning_rate": 5.023333333333333e-06, "loss": 2.0286, "step": 1501 }, { "epoch": 15.02, "grad_norm": 33.16562271118164, "learning_rate": 5.02e-06, "loss": 3.0701, "step": 1502 }, { "epoch": 15.03, "grad_norm": 203.82174682617188, "learning_rate": 5.016666666666667e-06, "loss": 3.2466, "step": 1503 }, { "epoch": 15.04, "grad_norm": 62.71471405029297, "learning_rate": 5.013333333333333e-06, "loss": 3.2709, "step": 1504 }, { "epoch": 15.05, "grad_norm": 40.61738204956055, "learning_rate": 5.01e-06, "loss": 2.5223, "step": 1505 }, { "epoch": 15.06, "grad_norm": 107.06895446777344, "learning_rate": 5.006666666666667e-06, "loss": 3.3446, "step": 1506 }, { "epoch": 15.07, "grad_norm": 37.426273345947266, "learning_rate": 5.0033333333333334e-06, "loss": 6.2007, "step": 1507 }, { "epoch": 15.08, "grad_norm": 51.89812088012695, "learning_rate": 5e-06, "loss": 3.0337, "step": 1508 }, { "epoch": 15.09, "grad_norm": 55.900596618652344, "learning_rate": 4.9966666666666665e-06, "loss": 3.5407, "step": 1509 }, { "epoch": 15.1, "grad_norm": 58.12286376953125, "learning_rate": 4.9933333333333335e-06, "loss": 3.3546, "step": 1510 }, { "epoch": 15.11, "grad_norm": 42.93201446533203, "learning_rate": 4.9900000000000005e-06, "loss": 3.2565, "step": 1511 }, { "epoch": 15.12, "grad_norm": 104.40103912353516, "learning_rate": 4.986666666666667e-06, "loss": 3.1433, 
"step": 1512 }, { "epoch": 15.13, "grad_norm": 76.1496810913086, "learning_rate": 4.983333333333334e-06, "loss": 3.0055, "step": 1513 }, { "epoch": 15.14, "grad_norm": 50.55366516113281, "learning_rate": 4.980000000000001e-06, "loss": 2.8734, "step": 1514 }, { "epoch": 15.15, "grad_norm": 77.1627197265625, "learning_rate": 4.976666666666667e-06, "loss": 2.2387, "step": 1515 }, { "epoch": 15.16, "grad_norm": 73.22570037841797, "learning_rate": 4.973333333333334e-06, "loss": 2.7804, "step": 1516 }, { "epoch": 15.17, "grad_norm": 180.1477508544922, "learning_rate": 4.970000000000001e-06, "loss": 3.238, "step": 1517 }, { "epoch": 15.18, "grad_norm": 74.73701477050781, "learning_rate": 4.966666666666667e-06, "loss": 2.6741, "step": 1518 }, { "epoch": 15.19, "grad_norm": 65.67842864990234, "learning_rate": 4.963333333333334e-06, "loss": 2.4568, "step": 1519 }, { "epoch": 15.2, "grad_norm": 43.1766471862793, "learning_rate": 4.960000000000001e-06, "loss": 5.7566, "step": 1520 }, { "epoch": 15.21, "grad_norm": 56.40361785888672, "learning_rate": 4.956666666666667e-06, "loss": 3.148, "step": 1521 }, { "epoch": 15.22, "grad_norm": 68.9504623413086, "learning_rate": 4.953333333333334e-06, "loss": 3.0764, "step": 1522 }, { "epoch": 15.23, "grad_norm": 30.75697135925293, "learning_rate": 4.95e-06, "loss": 3.1076, "step": 1523 }, { "epoch": 15.24, "grad_norm": 248.60557556152344, "learning_rate": 4.946666666666667e-06, "loss": 2.8873, "step": 1524 }, { "epoch": 15.25, "grad_norm": 88.14219665527344, "learning_rate": 4.943333333333334e-06, "loss": 3.0959, "step": 1525 }, { "epoch": 15.26, "grad_norm": 50.63975143432617, "learning_rate": 4.94e-06, "loss": 2.79, "step": 1526 }, { "epoch": 15.27, "grad_norm": 115.17135620117188, "learning_rate": 4.936666666666667e-06, "loss": 2.9716, "step": 1527 }, { "epoch": 15.28, "grad_norm": 107.84794616699219, "learning_rate": 4.933333333333334e-06, "loss": 3.2355, "step": 1528 }, { "epoch": 15.29, "grad_norm": 332.5867004394531, 
"learning_rate": 4.93e-06, "loss": 2.4461, "step": 1529 }, { "epoch": 15.3, "grad_norm": 43.75825500488281, "learning_rate": 4.926666666666667e-06, "loss": 3.4706, "step": 1530 }, { "epoch": 15.31, "grad_norm": 50.91869354248047, "learning_rate": 4.923333333333334e-06, "loss": 2.2322, "step": 1531 }, { "epoch": 15.32, "grad_norm": 66.60010528564453, "learning_rate": 4.92e-06, "loss": 3.6067, "step": 1532 }, { "epoch": 15.33, "grad_norm": 48.35940933227539, "learning_rate": 4.9166666666666665e-06, "loss": 2.7179, "step": 1533 }, { "epoch": 15.34, "grad_norm": 55.250511169433594, "learning_rate": 4.9133333333333334e-06, "loss": 2.8217, "step": 1534 }, { "epoch": 15.35, "grad_norm": 53.41901397705078, "learning_rate": 4.9100000000000004e-06, "loss": 3.0473, "step": 1535 }, { "epoch": 15.36, "grad_norm": 81.73320007324219, "learning_rate": 4.9066666666666666e-06, "loss": 2.4525, "step": 1536 }, { "epoch": 15.37, "grad_norm": 38.39985275268555, "learning_rate": 4.9033333333333335e-06, "loss": 2.8877, "step": 1537 }, { "epoch": 15.38, "grad_norm": 42.849727630615234, "learning_rate": 4.9000000000000005e-06, "loss": 3.5325, "step": 1538 }, { "epoch": 15.39, "grad_norm": 188.24082946777344, "learning_rate": 4.896666666666667e-06, "loss": 2.6322, "step": 1539 }, { "epoch": 15.4, "grad_norm": 74.44679260253906, "learning_rate": 4.893333333333334e-06, "loss": 3.4677, "step": 1540 }, { "epoch": 15.41, "grad_norm": 182.0277557373047, "learning_rate": 4.890000000000001e-06, "loss": 3.1218, "step": 1541 }, { "epoch": 15.42, "grad_norm": 41.87655258178711, "learning_rate": 4.886666666666668e-06, "loss": 3.1574, "step": 1542 }, { "epoch": 15.43, "grad_norm": 578.962890625, "learning_rate": 4.883333333333334e-06, "loss": 2.8744, "step": 1543 }, { "epoch": 15.44, "grad_norm": 34.64506149291992, "learning_rate": 4.880000000000001e-06, "loss": 3.2205, "step": 1544 }, { "epoch": 15.45, "grad_norm": 50.32636642456055, "learning_rate": 4.876666666666668e-06, "loss": 2.9351, "step": 1545 
}, { "epoch": 15.46, "grad_norm": 47.67721939086914, "learning_rate": 4.873333333333334e-06, "loss": 3.2628, "step": 1546 }, { "epoch": 15.47, "grad_norm": 38.18626022338867, "learning_rate": 4.87e-06, "loss": 2.6749, "step": 1547 }, { "epoch": 15.48, "grad_norm": 378.37103271484375, "learning_rate": 4.866666666666667e-06, "loss": 3.3712, "step": 1548 }, { "epoch": 15.49, "grad_norm": 52.901512145996094, "learning_rate": 4.863333333333334e-06, "loss": 3.1481, "step": 1549 }, { "epoch": 15.5, "grad_norm": 41.42707824707031, "learning_rate": 4.86e-06, "loss": 2.6171, "step": 1550 }, { "epoch": 15.5, "eval_loss": 2.801844596862793, "eval_map": 0.004, "eval_map_50": 0.01, "eval_map_75": 0.0032, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0104, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0008, "eval_map_medium": 0.0074, "eval_map_neckline": 0.0145, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1122, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0266, "eval_map_small": 0.0037, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0066, "eval_mar_10": 0.0211, "eval_mar_100": 0.0263, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, 
"eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0306, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.127, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5052, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4165, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0258, "eval_mar_medium": 0.0443, "eval_mar_small": 0.0197, "eval_model_preparation_time": 0.0124, "eval_runtime": 19.1071, "eval_samples_per_second": 5.234, "eval_steps_per_second": 1.308, "step": 1550 }, { "epoch": 15.51, "grad_norm": 662.0177001953125, "learning_rate": 4.856666666666667e-06, "loss": 2.3181, "step": 1551 }, { "epoch": 15.52, "grad_norm": 31.65778923034668, "learning_rate": 4.853333333333334e-06, "loss": 3.1898, "step": 1552 }, { "epoch": 15.53, "grad_norm": 60.95957565307617, "learning_rate": 4.85e-06, "loss": 3.4925, "step": 1553 }, { "epoch": 15.54, "grad_norm": 116.59793090820312, "learning_rate": 4.846666666666667e-06, "loss": 2.5313, "step": 1554 }, { "epoch": 15.55, "grad_norm": 27.960905075073242, "learning_rate": 4.843333333333334e-06, "loss": 2.741, "step": 1555 }, { "epoch": 15.56, "grad_norm": 44.73579025268555, 
"learning_rate": 4.84e-06, "loss": 2.6303, "step": 1556 }, { "epoch": 15.57, "grad_norm": 49.717769622802734, "learning_rate": 4.836666666666667e-06, "loss": 2.5861, "step": 1557 }, { "epoch": 15.58, "grad_norm": 40.62715148925781, "learning_rate": 4.833333333333333e-06, "loss": 2.7263, "step": 1558 }, { "epoch": 15.59, "grad_norm": 86.83805847167969, "learning_rate": 4.83e-06, "loss": 3.2511, "step": 1559 }, { "epoch": 15.6, "grad_norm": 40.13988494873047, "learning_rate": 4.826666666666667e-06, "loss": 3.0922, "step": 1560 }, { "epoch": 15.61, "grad_norm": 85.63914489746094, "learning_rate": 4.8233333333333335e-06, "loss": 2.4938, "step": 1561 }, { "epoch": 15.62, "grad_norm": 86.08257293701172, "learning_rate": 4.8200000000000004e-06, "loss": 3.1518, "step": 1562 }, { "epoch": 15.63, "grad_norm": 110.84781646728516, "learning_rate": 4.816666666666667e-06, "loss": 2.5322, "step": 1563 }, { "epoch": 15.64, "grad_norm": 52.63396453857422, "learning_rate": 4.8133333333333336e-06, "loss": 3.4903, "step": 1564 }, { "epoch": 15.65, "grad_norm": 62.26469802856445, "learning_rate": 4.8100000000000005e-06, "loss": 4.3188, "step": 1565 }, { "epoch": 15.66, "grad_norm": 37.29914855957031, "learning_rate": 4.8066666666666675e-06, "loss": 3.1811, "step": 1566 }, { "epoch": 15.67, "grad_norm": 89.87367248535156, "learning_rate": 4.803333333333334e-06, "loss": 2.2278, "step": 1567 }, { "epoch": 15.68, "grad_norm": 83.56629180908203, "learning_rate": 4.800000000000001e-06, "loss": 3.1504, "step": 1568 }, { "epoch": 15.69, "grad_norm": 45.26663589477539, "learning_rate": 4.796666666666668e-06, "loss": 2.3269, "step": 1569 }, { "epoch": 15.7, "grad_norm": 114.92687225341797, "learning_rate": 4.793333333333334e-06, "loss": 2.831, "step": 1570 }, { "epoch": 15.71, "grad_norm": 109.77179718017578, "learning_rate": 4.79e-06, "loss": 2.9562, "step": 1571 }, { "epoch": 15.72, "grad_norm": 76.30691528320312, "learning_rate": 4.786666666666667e-06, "loss": 3.213, "step": 1572 }, { 
"epoch": 15.73, "grad_norm": 67.7087173461914, "learning_rate": 4.783333333333334e-06, "loss": 2.7529, "step": 1573 }, { "epoch": 15.74, "grad_norm": 69.68976593017578, "learning_rate": 4.78e-06, "loss": 3.0628, "step": 1574 }, { "epoch": 15.75, "grad_norm": 134.46102905273438, "learning_rate": 4.776666666666667e-06, "loss": 3.256, "step": 1575 }, { "epoch": 15.76, "grad_norm": 87.40361785888672, "learning_rate": 4.773333333333334e-06, "loss": 3.3703, "step": 1576 }, { "epoch": 15.77, "grad_norm": 58.702632904052734, "learning_rate": 4.77e-06, "loss": 2.4902, "step": 1577 }, { "epoch": 15.78, "grad_norm": 114.21731567382812, "learning_rate": 4.766666666666667e-06, "loss": 3.0489, "step": 1578 }, { "epoch": 15.79, "grad_norm": 167.2410125732422, "learning_rate": 4.763333333333334e-06, "loss": 2.5602, "step": 1579 }, { "epoch": 15.8, "grad_norm": 61.165565490722656, "learning_rate": 4.76e-06, "loss": 3.4073, "step": 1580 }, { "epoch": 15.81, "grad_norm": 254.77426147460938, "learning_rate": 4.756666666666667e-06, "loss": 3.3068, "step": 1581 }, { "epoch": 15.82, "grad_norm": 61.220603942871094, "learning_rate": 4.753333333333333e-06, "loss": 3.342, "step": 1582 }, { "epoch": 15.83, "grad_norm": 56.912967681884766, "learning_rate": 4.75e-06, "loss": 2.5609, "step": 1583 }, { "epoch": 15.84, "grad_norm": 226.22161865234375, "learning_rate": 4.746666666666667e-06, "loss": 3.1378, "step": 1584 }, { "epoch": 15.85, "grad_norm": 191.59719848632812, "learning_rate": 4.743333333333333e-06, "loss": 2.6352, "step": 1585 }, { "epoch": 15.86, "grad_norm": 121.24445343017578, "learning_rate": 4.74e-06, "loss": 2.6626, "step": 1586 }, { "epoch": 15.87, "grad_norm": 48.53812026977539, "learning_rate": 4.736666666666667e-06, "loss": 2.5227, "step": 1587 }, { "epoch": 15.88, "grad_norm": 42.24192428588867, "learning_rate": 4.7333333333333335e-06, "loss": 3.8011, "step": 1588 }, { "epoch": 15.89, "grad_norm": 101.07719421386719, "learning_rate": 4.7300000000000005e-06, "loss": 2.8839, 
"step": 1589 }, { "epoch": 15.9, "grad_norm": 55.93063735961914, "learning_rate": 4.7266666666666674e-06, "loss": 3.5112, "step": 1590 }, { "epoch": 15.91, "grad_norm": 44.656375885009766, "learning_rate": 4.7233333333333336e-06, "loss": 3.3509, "step": 1591 }, { "epoch": 15.92, "grad_norm": 126.11185455322266, "learning_rate": 4.7200000000000005e-06, "loss": 2.4669, "step": 1592 }, { "epoch": 15.93, "grad_norm": 29.57367515563965, "learning_rate": 4.7166666666666675e-06, "loss": 2.4593, "step": 1593 }, { "epoch": 15.94, "grad_norm": 83.23413848876953, "learning_rate": 4.713333333333334e-06, "loss": 2.3605, "step": 1594 }, { "epoch": 15.95, "grad_norm": 279.34625244140625, "learning_rate": 4.71e-06, "loss": 2.7733, "step": 1595 }, { "epoch": 15.96, "grad_norm": 71.80205535888672, "learning_rate": 4.706666666666667e-06, "loss": 2.9868, "step": 1596 }, { "epoch": 15.97, "grad_norm": 54.41868591308594, "learning_rate": 4.703333333333334e-06, "loss": 2.7623, "step": 1597 }, { "epoch": 15.98, "grad_norm": 64.18541717529297, "learning_rate": 4.7e-06, "loss": 3.504, "step": 1598 }, { "epoch": 15.99, "grad_norm": 63.1773567199707, "learning_rate": 4.696666666666667e-06, "loss": 3.5123, "step": 1599 }, { "epoch": 16.0, "grad_norm": 150.7760467529297, "learning_rate": 4.693333333333334e-06, "loss": 2.7484, "step": 1600 }, { "epoch": 16.0, "eval_loss": 2.8278472423553467, "eval_map": 0.004, "eval_map_50": 0.009, "eval_map_75": 0.0031, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0048, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0005, 
"eval_map_medium": 0.0086, "eval_map_neckline": 0.0134, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1153, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.029, "eval_map_small": 0.0037, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0064, "eval_mar_10": 0.0212, "eval_mar_100": 0.0256, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0184, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1397, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.4978, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.3957, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0224, "eval_mar_medium": 0.0435, "eval_mar_small": 0.0206, "eval_model_preparation_time": 0.0124, 
"eval_runtime": 19.4708, "eval_samples_per_second": 5.136, "eval_steps_per_second": 1.284, "step": 1600 }, { "epoch": 16.01, "grad_norm": 51.64803695678711, "learning_rate": 4.69e-06, "loss": 2.9781, "step": 1601 }, { "epoch": 16.02, "grad_norm": 94.46916198730469, "learning_rate": 4.686666666666667e-06, "loss": 3.2839, "step": 1602 }, { "epoch": 16.03, "grad_norm": 44.00434875488281, "learning_rate": 4.683333333333334e-06, "loss": 2.6544, "step": 1603 }, { "epoch": 16.04, "grad_norm": 39.65818786621094, "learning_rate": 4.680000000000001e-06, "loss": 4.4171, "step": 1604 }, { "epoch": 16.05, "grad_norm": 174.75711059570312, "learning_rate": 4.676666666666667e-06, "loss": 2.734, "step": 1605 }, { "epoch": 16.06, "grad_norm": 54.386314392089844, "learning_rate": 4.673333333333333e-06, "loss": 2.7612, "step": 1606 }, { "epoch": 16.07, "grad_norm": 49.423831939697266, "learning_rate": 4.670000000000001e-06, "loss": 3.002, "step": 1607 }, { "epoch": 16.08, "grad_norm": 53.813690185546875, "learning_rate": 4.666666666666667e-06, "loss": 3.3331, "step": 1608 }, { "epoch": 16.09, "grad_norm": 52.423038482666016, "learning_rate": 4.663333333333333e-06, "loss": 2.9268, "step": 1609 }, { "epoch": 16.1, "grad_norm": 76.59796905517578, "learning_rate": 4.66e-06, "loss": 2.167, "step": 1610 }, { "epoch": 16.11, "grad_norm": 39.17863464355469, "learning_rate": 4.656666666666667e-06, "loss": 3.0942, "step": 1611 }, { "epoch": 16.12, "grad_norm": 63.02248764038086, "learning_rate": 4.653333333333333e-06, "loss": 3.6952, "step": 1612 }, { "epoch": 16.13, "grad_norm": 50.39514923095703, "learning_rate": 4.65e-06, "loss": 3.4799, "step": 1613 }, { "epoch": 16.14, "grad_norm": 60.208248138427734, "learning_rate": 4.646666666666667e-06, "loss": 2.6786, "step": 1614 }, { "epoch": 16.15, "grad_norm": 76.2073974609375, "learning_rate": 4.6433333333333335e-06, "loss": 2.4772, "step": 1615 }, { "epoch": 16.16, "grad_norm": 40.48984909057617, "learning_rate": 4.6400000000000005e-06, "loss": 
2.7724, "step": 1616 }, { "epoch": 16.17, "grad_norm": 67.50389099121094, "learning_rate": 4.6366666666666674e-06, "loss": 2.4111, "step": 1617 }, { "epoch": 16.18, "grad_norm": 76.6739501953125, "learning_rate": 4.633333333333334e-06, "loss": 2.8431, "step": 1618 }, { "epoch": 16.19, "grad_norm": 53.61623001098633, "learning_rate": 4.6300000000000006e-06, "loss": 3.0553, "step": 1619 }, { "epoch": 16.2, "grad_norm": 37.533424377441406, "learning_rate": 4.626666666666667e-06, "loss": 2.8481, "step": 1620 }, { "epoch": 16.21, "grad_norm": 35.64815139770508, "learning_rate": 4.623333333333334e-06, "loss": 2.461, "step": 1621 }, { "epoch": 16.22, "grad_norm": 50.49835205078125, "learning_rate": 4.620000000000001e-06, "loss": 2.8362, "step": 1622 }, { "epoch": 16.23, "grad_norm": 97.53995513916016, "learning_rate": 4.616666666666667e-06, "loss": 3.5247, "step": 1623 }, { "epoch": 16.24, "grad_norm": 49.3843879699707, "learning_rate": 4.613333333333334e-06, "loss": 1.9732, "step": 1624 }, { "epoch": 16.25, "grad_norm": 53.152435302734375, "learning_rate": 4.610000000000001e-06, "loss": 3.1136, "step": 1625 }, { "epoch": 16.26, "grad_norm": 60.18634033203125, "learning_rate": 4.606666666666667e-06, "loss": 2.3059, "step": 1626 }, { "epoch": 16.27, "grad_norm": 52.225772857666016, "learning_rate": 4.603333333333334e-06, "loss": 2.7955, "step": 1627 }, { "epoch": 16.28, "grad_norm": 32.46910095214844, "learning_rate": 4.600000000000001e-06, "loss": 2.9435, "step": 1628 }, { "epoch": 16.29, "grad_norm": 61.46830368041992, "learning_rate": 4.596666666666667e-06, "loss": 3.8636, "step": 1629 }, { "epoch": 16.3, "grad_norm": 47.77442932128906, "learning_rate": 4.593333333333333e-06, "loss": 3.0252, "step": 1630 }, { "epoch": 16.31, "grad_norm": 39.13760757446289, "learning_rate": 4.590000000000001e-06, "loss": 2.8974, "step": 1631 }, { "epoch": 16.32, "grad_norm": 183.57640075683594, "learning_rate": 4.586666666666667e-06, "loss": 2.4719, "step": 1632 }, { "epoch": 16.33, 
"grad_norm": 131.42417907714844, "learning_rate": 4.583333333333333e-06, "loss": 3.3668, "step": 1633 }, { "epoch": 16.34, "grad_norm": 91.36686706542969, "learning_rate": 4.58e-06, "loss": 3.4152, "step": 1634 }, { "epoch": 16.35, "grad_norm": 77.6273422241211, "learning_rate": 4.576666666666667e-06, "loss": 2.1013, "step": 1635 }, { "epoch": 16.36, "grad_norm": 60.077423095703125, "learning_rate": 4.573333333333333e-06, "loss": 2.7132, "step": 1636 }, { "epoch": 16.37, "grad_norm": 43.609920501708984, "learning_rate": 4.57e-06, "loss": 3.0349, "step": 1637 }, { "epoch": 16.38, "grad_norm": 38.53335952758789, "learning_rate": 4.566666666666667e-06, "loss": 3.1609, "step": 1638 }, { "epoch": 16.39, "grad_norm": 53.12407684326172, "learning_rate": 4.563333333333333e-06, "loss": 2.9801, "step": 1639 }, { "epoch": 16.4, "grad_norm": 74.54437255859375, "learning_rate": 4.56e-06, "loss": 2.4392, "step": 1640 }, { "epoch": 16.41, "grad_norm": 40.53291320800781, "learning_rate": 4.556666666666667e-06, "loss": 3.354, "step": 1641 }, { "epoch": 16.42, "grad_norm": 57.939979553222656, "learning_rate": 4.5533333333333335e-06, "loss": 2.532, "step": 1642 }, { "epoch": 16.43, "grad_norm": 50.95173645019531, "learning_rate": 4.5500000000000005e-06, "loss": 3.0336, "step": 1643 }, { "epoch": 16.44, "grad_norm": 47.0252685546875, "learning_rate": 4.546666666666667e-06, "loss": 2.7038, "step": 1644 }, { "epoch": 16.45, "grad_norm": 59.31657409667969, "learning_rate": 4.543333333333334e-06, "loss": 2.9445, "step": 1645 }, { "epoch": 16.46, "grad_norm": 39.010894775390625, "learning_rate": 4.540000000000001e-06, "loss": 3.4846, "step": 1646 }, { "epoch": 16.47, "grad_norm": 54.28582763671875, "learning_rate": 4.536666666666667e-06, "loss": 2.7605, "step": 1647 }, { "epoch": 16.48, "grad_norm": 49.737125396728516, "learning_rate": 4.533333333333334e-06, "loss": 2.9355, "step": 1648 }, { "epoch": 16.49, "grad_norm": 169.25360107421875, "learning_rate": 4.530000000000001e-06, "loss": 
2.6399, "step": 1649 }, { "epoch": 16.5, "grad_norm": 47.61027908325195, "learning_rate": 4.526666666666667e-06, "loss": 2.2523, "step": 1650 }, { "epoch": 16.5, "eval_loss": 2.7804033756256104, "eval_map": 0.0039, "eval_map_50": 0.009, "eval_map_75": 0.0031, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.005, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0027, "eval_map_medium": 0.0087, "eval_map_neckline": 0.0159, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1118, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0287, "eval_map_small": 0.0037, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0063, "eval_mar_10": 0.023, "eval_mar_100": 0.0281, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0224, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, 
"eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1651, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5127, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4539, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.025, "eval_mar_medium": 0.0489, "eval_mar_small": 0.0201, "eval_model_preparation_time": 0.0124, "eval_runtime": 19.2256, "eval_samples_per_second": 5.201, "eval_steps_per_second": 1.3, "step": 1650 }, { "epoch": 16.51, "grad_norm": 29.714706420898438, "learning_rate": 4.523333333333334e-06, "loss": 3.742, "step": 1651 }, { "epoch": 16.52, "grad_norm": 52.898590087890625, "learning_rate": 4.520000000000001e-06, "loss": 2.4891, "step": 1652 }, { "epoch": 16.53, "grad_norm": 95.74098205566406, "learning_rate": 4.516666666666667e-06, "loss": 3.6412, "step": 1653 }, { "epoch": 16.54, "grad_norm": 41.9851188659668, "learning_rate": 4.513333333333333e-06, "loss": 3.0417, "step": 1654 }, { "epoch": 16.55, "grad_norm": 101.03370666503906, "learning_rate": 4.510000000000001e-06, "loss": 2.9819, "step": 1655 }, { "epoch": 16.56, "grad_norm": 128.2178955078125, "learning_rate": 4.506666666666667e-06, "loss": 3.1893, "step": 1656 }, { "epoch": 16.57, "grad_norm": 38.25985336303711, "learning_rate": 4.503333333333333e-06, "loss": 3.1134, "step": 1657 }, { "epoch": 16.58, "grad_norm": 121.27034759521484, "learning_rate": 4.5e-06, "loss": 1.9778, "step": 1658 }, { "epoch": 16.59, "grad_norm": 40.41843032836914, "learning_rate": 4.496666666666667e-06, "loss": 2.3524, "step": 1659 }, { "epoch": 
16.6, "grad_norm": 61.926025390625, "learning_rate": 4.493333333333333e-06, "loss": 3.0764, "step": 1660 }, { "epoch": 16.61, "grad_norm": 50.48045349121094, "learning_rate": 4.49e-06, "loss": 2.7741, "step": 1661 }, { "epoch": 16.62, "grad_norm": 60.41294479370117, "learning_rate": 4.486666666666667e-06, "loss": 3.1287, "step": 1662 }, { "epoch": 16.63, "grad_norm": 59.9041633605957, "learning_rate": 4.483333333333333e-06, "loss": 2.8513, "step": 1663 }, { "epoch": 16.64, "grad_norm": 41.5048942565918, "learning_rate": 4.48e-06, "loss": 3.1221, "step": 1664 }, { "epoch": 16.65, "grad_norm": 51.5759391784668, "learning_rate": 4.476666666666667e-06, "loss": 2.973, "step": 1665 }, { "epoch": 16.66, "grad_norm": 43.65202331542969, "learning_rate": 4.473333333333334e-06, "loss": 2.2894, "step": 1666 }, { "epoch": 16.67, "grad_norm": 127.49104309082031, "learning_rate": 4.47e-06, "loss": 2.9698, "step": 1667 }, { "epoch": 16.68, "grad_norm": 138.65789794921875, "learning_rate": 4.4666666666666665e-06, "loss": 2.9274, "step": 1668 }, { "epoch": 16.69, "grad_norm": 63.20673370361328, "learning_rate": 4.463333333333334e-06, "loss": 2.7494, "step": 1669 }, { "epoch": 16.7, "grad_norm": 143.61065673828125, "learning_rate": 4.4600000000000005e-06, "loss": 3.3401, "step": 1670 }, { "epoch": 16.71, "grad_norm": 24.51004409790039, "learning_rate": 4.456666666666667e-06, "loss": 3.529, "step": 1671 }, { "epoch": 16.72, "grad_norm": 31.250057220458984, "learning_rate": 4.453333333333334e-06, "loss": 3.4113, "step": 1672 }, { "epoch": 16.73, "grad_norm": 63.55746841430664, "learning_rate": 4.450000000000001e-06, "loss": 2.9982, "step": 1673 }, { "epoch": 16.74, "grad_norm": 286.0579833984375, "learning_rate": 4.446666666666667e-06, "loss": 2.9264, "step": 1674 }, { "epoch": 16.75, "grad_norm": 144.32542419433594, "learning_rate": 4.443333333333334e-06, "loss": 3.0289, "step": 1675 }, { "epoch": 16.76, "grad_norm": 158.62973022460938, "learning_rate": 4.440000000000001e-06, "loss": 
3.0534, "step": 1676 }, { "epoch": 16.77, "grad_norm": 60.9655876159668, "learning_rate": 4.436666666666667e-06, "loss": 2.6113, "step": 1677 }, { "epoch": 16.78, "grad_norm": 42.86030960083008, "learning_rate": 4.433333333333334e-06, "loss": 5.9899, "step": 1678 }, { "epoch": 16.79, "grad_norm": 87.87378692626953, "learning_rate": 4.430000000000001e-06, "loss": 3.1965, "step": 1679 }, { "epoch": 16.8, "grad_norm": 79.91812133789062, "learning_rate": 4.426666666666667e-06, "loss": 3.0147, "step": 1680 }, { "epoch": 16.81, "grad_norm": 44.11559295654297, "learning_rate": 4.423333333333334e-06, "loss": 3.2712, "step": 1681 }, { "epoch": 16.82, "grad_norm": 92.6834487915039, "learning_rate": 4.42e-06, "loss": 3.2978, "step": 1682 }, { "epoch": 16.83, "grad_norm": 52.873687744140625, "learning_rate": 4.416666666666667e-06, "loss": 2.9335, "step": 1683 }, { "epoch": 16.84, "grad_norm": 200.7749481201172, "learning_rate": 4.413333333333334e-06, "loss": 2.7831, "step": 1684 }, { "epoch": 16.85, "grad_norm": 106.32072448730469, "learning_rate": 4.41e-06, "loss": 2.3006, "step": 1685 }, { "epoch": 16.86, "grad_norm": 50.589359283447266, "learning_rate": 4.406666666666667e-06, "loss": 3.499, "step": 1686 }, { "epoch": 16.87, "grad_norm": 108.40155029296875, "learning_rate": 4.403333333333334e-06, "loss": 2.351, "step": 1687 }, { "epoch": 16.88, "grad_norm": 41.256500244140625, "learning_rate": 4.4e-06, "loss": 6.0889, "step": 1688 }, { "epoch": 16.89, "grad_norm": 42.37044143676758, "learning_rate": 4.396666666666667e-06, "loss": 3.1435, "step": 1689 }, { "epoch": 16.9, "grad_norm": 39.3423957824707, "learning_rate": 4.393333333333334e-06, "loss": 2.8037, "step": 1690 }, { "epoch": 16.91, "grad_norm": 49.95433807373047, "learning_rate": 4.39e-06, "loss": 2.464, "step": 1691 }, { "epoch": 16.92, "grad_norm": 39.653228759765625, "learning_rate": 4.3866666666666665e-06, "loss": 3.1769, "step": 1692 }, { "epoch": 16.93, "grad_norm": 143.61099243164062, "learning_rate": 
4.383333333333334e-06, "loss": 3.4779, "step": 1693 }, { "epoch": 16.94, "grad_norm": 54.84747314453125, "learning_rate": 4.38e-06, "loss": 3.3779, "step": 1694 }, { "epoch": 16.95, "grad_norm": 54.9208984375, "learning_rate": 4.3766666666666665e-06, "loss": 2.804, "step": 1695 }, { "epoch": 16.96, "grad_norm": 72.18391418457031, "learning_rate": 4.3733333333333335e-06, "loss": 2.658, "step": 1696 }, { "epoch": 16.97, "grad_norm": 124.93379211425781, "learning_rate": 4.3700000000000005e-06, "loss": 2.3322, "step": 1697 }, { "epoch": 16.98, "grad_norm": 50.07054138183594, "learning_rate": 4.366666666666667e-06, "loss": 2.9325, "step": 1698 }, { "epoch": 16.99, "grad_norm": 48.075565338134766, "learning_rate": 4.363333333333334e-06, "loss": 3.1891, "step": 1699 }, { "epoch": 17.0, "grad_norm": 96.92658996582031, "learning_rate": 4.360000000000001e-06, "loss": 2.792, "step": 1700 }, { "epoch": 17.0, "eval_loss": 2.7743890285491943, "eval_map": 0.0039, "eval_map_50": 0.0096, "eval_map_75": 0.0028, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0112, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0031, "eval_map_medium": 0.0074, "eval_map_neckline": 0.0129, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.109, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0286, "eval_map_small": 0.0037, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 
0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0059, "eval_mar_10": 0.0223, "eval_mar_100": 0.0279, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0469, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1524, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.4731, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4713, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0291, "eval_mar_medium": 0.0474, "eval_mar_small": 0.0203, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.582, "eval_samples_per_second": 5.382, "eval_steps_per_second": 1.345, "step": 1700 }, { "epoch": 17.01, "grad_norm": 70.44912719726562, "learning_rate": 4.356666666666667e-06, "loss": 2.6669, "step": 1701 }, { "epoch": 17.02, "grad_norm": 151.4870147705078, "learning_rate": 4.353333333333334e-06, "loss": 2.8216, "step": 1702 }, { "epoch": 17.03, "grad_norm": 61.05984115600586, "learning_rate": 4.350000000000001e-06, "loss": 
3.3296, "step": 1703 }, { "epoch": 17.04, "grad_norm": 62.80535125732422, "learning_rate": 4.346666666666667e-06, "loss": 2.8926, "step": 1704 }, { "epoch": 17.05, "grad_norm": 83.64286804199219, "learning_rate": 4.343333333333334e-06, "loss": 2.8012, "step": 1705 }, { "epoch": 17.06, "grad_norm": 135.710693359375, "learning_rate": 4.34e-06, "loss": 2.5302, "step": 1706 }, { "epoch": 17.07, "grad_norm": 34.929222106933594, "learning_rate": 4.336666666666667e-06, "loss": 2.2108, "step": 1707 }, { "epoch": 17.08, "grad_norm": 139.76048278808594, "learning_rate": 4.333333333333334e-06, "loss": 3.8099, "step": 1708 }, { "epoch": 17.09, "grad_norm": 61.35136032104492, "learning_rate": 4.33e-06, "loss": 2.544, "step": 1709 }, { "epoch": 17.1, "grad_norm": 55.194217681884766, "learning_rate": 4.326666666666667e-06, "loss": 2.5282, "step": 1710 }, { "epoch": 17.11, "grad_norm": 30.689586639404297, "learning_rate": 4.323333333333334e-06, "loss": 6.0436, "step": 1711 }, { "epoch": 17.12, "grad_norm": 47.12371063232422, "learning_rate": 4.32e-06, "loss": 3.0519, "step": 1712 }, { "epoch": 17.13, "grad_norm": 37.01338577270508, "learning_rate": 4.316666666666667e-06, "loss": 2.9403, "step": 1713 }, { "epoch": 17.14, "grad_norm": 81.94853210449219, "learning_rate": 4.313333333333334e-06, "loss": 2.541, "step": 1714 }, { "epoch": 17.15, "grad_norm": 56.25660705566406, "learning_rate": 4.31e-06, "loss": 2.6932, "step": 1715 }, { "epoch": 17.16, "grad_norm": 42.51362228393555, "learning_rate": 4.306666666666666e-06, "loss": 3.6764, "step": 1716 }, { "epoch": 17.17, "grad_norm": 70.59624481201172, "learning_rate": 4.303333333333334e-06, "loss": 3.1212, "step": 1717 }, { "epoch": 17.18, "grad_norm": 42.29269027709961, "learning_rate": 4.3e-06, "loss": 3.63, "step": 1718 }, { "epoch": 17.19, "grad_norm": 62.73497009277344, "learning_rate": 4.2966666666666665e-06, "loss": 2.8263, "step": 1719 }, { "epoch": 17.2, "grad_norm": 54.95612335205078, "learning_rate": 4.2933333333333334e-06, 
"loss": 3.2263, "step": 1720 }, { "epoch": 17.21, "grad_norm": 197.19435119628906, "learning_rate": 4.2900000000000004e-06, "loss": 2.5726, "step": 1721 }, { "epoch": 17.22, "grad_norm": 122.54187774658203, "learning_rate": 4.2866666666666666e-06, "loss": 3.0048, "step": 1722 }, { "epoch": 17.23, "grad_norm": 76.07810974121094, "learning_rate": 4.2833333333333335e-06, "loss": 2.1877, "step": 1723 }, { "epoch": 17.24, "grad_norm": 111.0024642944336, "learning_rate": 4.2800000000000005e-06, "loss": 3.4925, "step": 1724 }, { "epoch": 17.25, "grad_norm": 38.47024917602539, "learning_rate": 4.276666666666667e-06, "loss": 2.4583, "step": 1725 }, { "epoch": 17.26, "grad_norm": 29.511600494384766, "learning_rate": 4.273333333333334e-06, "loss": 2.702, "step": 1726 }, { "epoch": 17.27, "grad_norm": 100.78568267822266, "learning_rate": 4.270000000000001e-06, "loss": 2.0134, "step": 1727 }, { "epoch": 17.28, "grad_norm": 39.54787063598633, "learning_rate": 4.266666666666668e-06, "loss": 2.4992, "step": 1728 }, { "epoch": 17.29, "grad_norm": 47.58334732055664, "learning_rate": 4.263333333333334e-06, "loss": 2.7616, "step": 1729 }, { "epoch": 17.3, "grad_norm": 49.16594314575195, "learning_rate": 4.26e-06, "loss": 2.8194, "step": 1730 }, { "epoch": 17.31, "grad_norm": 43.90185546875, "learning_rate": 4.256666666666668e-06, "loss": 2.7786, "step": 1731 }, { "epoch": 17.32, "grad_norm": 83.2711410522461, "learning_rate": 4.253333333333334e-06, "loss": 3.323, "step": 1732 }, { "epoch": 17.33, "grad_norm": 96.49673461914062, "learning_rate": 4.25e-06, "loss": 3.4659, "step": 1733 }, { "epoch": 17.34, "grad_norm": 284.5299377441406, "learning_rate": 4.246666666666667e-06, "loss": 2.8089, "step": 1734 }, { "epoch": 17.35, "grad_norm": 110.97789001464844, "learning_rate": 4.243333333333334e-06, "loss": 2.7204, "step": 1735 }, { "epoch": 17.36, "grad_norm": 44.30836486816406, "learning_rate": 4.24e-06, "loss": 2.8819, "step": 1736 }, { "epoch": 17.37, "grad_norm": 133.83468627929688, 
"learning_rate": 4.236666666666667e-06, "loss": 3.8741, "step": 1737 }, { "epoch": 17.38, "grad_norm": 48.69088363647461, "learning_rate": 4.233333333333334e-06, "loss": 2.8852, "step": 1738 }, { "epoch": 17.39, "grad_norm": 59.404273986816406, "learning_rate": 4.23e-06, "loss": 2.338, "step": 1739 }, { "epoch": 17.4, "grad_norm": 137.21197509765625, "learning_rate": 4.226666666666667e-06, "loss": 2.8016, "step": 1740 }, { "epoch": 17.41, "grad_norm": 70.66858673095703, "learning_rate": 4.223333333333334e-06, "loss": 3.27, "step": 1741 }, { "epoch": 17.42, "grad_norm": 55.5930061340332, "learning_rate": 4.22e-06, "loss": 2.5677, "step": 1742 }, { "epoch": 17.43, "grad_norm": 43.064945220947266, "learning_rate": 4.216666666666667e-06, "loss": 2.7481, "step": 1743 }, { "epoch": 17.44, "grad_norm": 55.2343635559082, "learning_rate": 4.213333333333333e-06, "loss": 2.3161, "step": 1744 }, { "epoch": 17.45, "grad_norm": 33.412418365478516, "learning_rate": 4.21e-06, "loss": 3.1801, "step": 1745 }, { "epoch": 17.46, "grad_norm": 44.564815521240234, "learning_rate": 4.206666666666667e-06, "loss": 2.3832, "step": 1746 }, { "epoch": 17.47, "grad_norm": 37.339229583740234, "learning_rate": 4.2033333333333335e-06, "loss": 2.2934, "step": 1747 }, { "epoch": 17.48, "grad_norm": 50.8483772277832, "learning_rate": 4.2000000000000004e-06, "loss": 3.3603, "step": 1748 }, { "epoch": 17.49, "grad_norm": 73.18268585205078, "learning_rate": 4.1966666666666674e-06, "loss": 3.4474, "step": 1749 }, { "epoch": 17.5, "grad_norm": 51.545433044433594, "learning_rate": 4.1933333333333336e-06, "loss": 2.9321, "step": 1750 }, { "epoch": 17.5, "eval_loss": 2.758105993270874, "eval_map": 0.004, "eval_map_50": 0.0099, "eval_map_75": 0.0031, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0106, "eval_map_epaulette": 
0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0009, "eval_map_medium": 0.0074, "eval_map_neckline": 0.0114, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1172, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0244, "eval_map_small": 0.0039, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0065, "eval_mar_10": 0.0219, "eval_mar_100": 0.0274, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0531, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1429, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.4761, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4496, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, 
"eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0285, "eval_mar_medium": 0.0451, "eval_mar_small": 0.0213, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.4446, "eval_samples_per_second": 5.422, "eval_steps_per_second": 1.355, "step": 1750 }, { "epoch": 17.51, "grad_norm": 54.17736053466797, "learning_rate": 4.1900000000000005e-06, "loss": 3.4908, "step": 1751 }, { "epoch": 17.52, "grad_norm": 55.984745025634766, "learning_rate": 4.1866666666666675e-06, "loss": 2.7115, "step": 1752 }, { "epoch": 17.53, "grad_norm": 33.248653411865234, "learning_rate": 4.183333333333334e-06, "loss": 3.2964, "step": 1753 }, { "epoch": 17.54, "grad_norm": 139.6072540283203, "learning_rate": 4.18e-06, "loss": 3.3056, "step": 1754 }, { "epoch": 17.55, "grad_norm": 57.8404541015625, "learning_rate": 4.176666666666668e-06, "loss": 2.4289, "step": 1755 }, { "epoch": 17.56, "grad_norm": 90.94409942626953, "learning_rate": 4.173333333333334e-06, "loss": 2.6428, "step": 1756 }, { "epoch": 17.57, "grad_norm": 35.67275619506836, "learning_rate": 4.17e-06, "loss": 2.6811, "step": 1757 }, { "epoch": 17.58, "grad_norm": 55.36580276489258, "learning_rate": 4.166666666666667e-06, "loss": 2.6071, "step": 1758 }, { "epoch": 17.59, "grad_norm": 41.21548843383789, "learning_rate": 4.163333333333334e-06, "loss": 3.2867, "step": 1759 }, { "epoch": 17.6, "grad_norm": 74.54977416992188, "learning_rate": 4.16e-06, "loss": 3.2546, "step": 1760 }, { "epoch": 17.61, "grad_norm": 33.19369888305664, "learning_rate": 4.156666666666667e-06, "loss": 2.3782, "step": 1761 }, { "epoch": 17.62, "grad_norm": 34.115806579589844, "learning_rate": 4.153333333333334e-06, "loss": 3.5027, "step": 1762 }, { "epoch": 17.63, "grad_norm": 79.30489349365234, "learning_rate": 4.15e-06, "loss": 2.7466, "step": 1763 }, { "epoch": 17.64, "grad_norm": 81.44818115234375, "learning_rate": 
4.146666666666667e-06, "loss": 3.8659, "step": 1764 }, { "epoch": 17.65, "grad_norm": 48.200958251953125, "learning_rate": 4.143333333333334e-06, "loss": 3.0873, "step": 1765 }, { "epoch": 17.66, "grad_norm": 39.60981750488281, "learning_rate": 4.14e-06, "loss": 2.6047, "step": 1766 }, { "epoch": 17.67, "grad_norm": 29.686006546020508, "learning_rate": 4.136666666666667e-06, "loss": 2.6395, "step": 1767 }, { "epoch": 17.68, "grad_norm": 67.99470520019531, "learning_rate": 4.133333333333333e-06, "loss": 3.4554, "step": 1768 }, { "epoch": 17.69, "grad_norm": 80.48687744140625, "learning_rate": 4.13e-06, "loss": 3.1351, "step": 1769 }, { "epoch": 17.7, "grad_norm": 155.22120666503906, "learning_rate": 4.126666666666667e-06, "loss": 3.2834, "step": 1770 }, { "epoch": 17.71, "grad_norm": 101.75020599365234, "learning_rate": 4.123333333333333e-06, "loss": 2.8136, "step": 1771 }, { "epoch": 17.72, "grad_norm": 50.348995208740234, "learning_rate": 4.12e-06, "loss": 3.0027, "step": 1772 }, { "epoch": 17.73, "grad_norm": 179.58926391601562, "learning_rate": 4.116666666666667e-06, "loss": 3.238, "step": 1773 }, { "epoch": 17.74, "grad_norm": 57.62267303466797, "learning_rate": 4.1133333333333335e-06, "loss": 2.6513, "step": 1774 }, { "epoch": 17.75, "grad_norm": 48.37411117553711, "learning_rate": 4.1100000000000005e-06, "loss": 2.1217, "step": 1775 }, { "epoch": 17.76, "grad_norm": 40.5620002746582, "learning_rate": 4.1066666666666674e-06, "loss": 2.5556, "step": 1776 }, { "epoch": 17.77, "grad_norm": 56.46894073486328, "learning_rate": 4.1033333333333336e-06, "loss": 2.6839, "step": 1777 }, { "epoch": 17.78, "grad_norm": 28.727365493774414, "learning_rate": 4.1e-06, "loss": 2.9261, "step": 1778 }, { "epoch": 17.79, "grad_norm": 53.51795959472656, "learning_rate": 4.0966666666666675e-06, "loss": 2.3977, "step": 1779 }, { "epoch": 17.8, "grad_norm": 32.401424407958984, "learning_rate": 4.093333333333334e-06, "loss": 3.2483, "step": 1780 }, { "epoch": 17.81, "grad_norm": 
107.86668395996094, "learning_rate": 4.09e-06, "loss": 3.1729, "step": 1781 }, { "epoch": 17.82, "grad_norm": 58.412452697753906, "learning_rate": 4.086666666666667e-06, "loss": 2.7473, "step": 1782 }, { "epoch": 17.83, "grad_norm": 39.12955856323242, "learning_rate": 4.083333333333334e-06, "loss": 5.8244, "step": 1783 }, { "epoch": 17.84, "grad_norm": 235.36114501953125, "learning_rate": 4.08e-06, "loss": 3.0715, "step": 1784 }, { "epoch": 17.85, "grad_norm": 53.82994079589844, "learning_rate": 4.076666666666667e-06, "loss": 2.4803, "step": 1785 }, { "epoch": 17.86, "grad_norm": 59.21012496948242, "learning_rate": 4.073333333333334e-06, "loss": 3.1225, "step": 1786 }, { "epoch": 17.87, "grad_norm": 86.5704116821289, "learning_rate": 4.07e-06, "loss": 3.4656, "step": 1787 }, { "epoch": 17.88, "grad_norm": 116.05509185791016, "learning_rate": 4.066666666666667e-06, "loss": 2.9404, "step": 1788 }, { "epoch": 17.89, "grad_norm": 64.2020034790039, "learning_rate": 4.063333333333334e-06, "loss": 3.0755, "step": 1789 }, { "epoch": 17.9, "grad_norm": 66.30765533447266, "learning_rate": 4.060000000000001e-06, "loss": 2.7421, "step": 1790 }, { "epoch": 17.91, "grad_norm": 70.66299438476562, "learning_rate": 4.056666666666667e-06, "loss": 3.7658, "step": 1791 }, { "epoch": 17.92, "grad_norm": 34.69910430908203, "learning_rate": 4.053333333333333e-06, "loss": 3.0214, "step": 1792 }, { "epoch": 17.93, "grad_norm": 122.93827819824219, "learning_rate": 4.05e-06, "loss": 2.7023, "step": 1793 }, { "epoch": 17.94, "grad_norm": 151.31141662597656, "learning_rate": 4.046666666666667e-06, "loss": 3.4019, "step": 1794 }, { "epoch": 17.95, "grad_norm": 340.618896484375, "learning_rate": 4.043333333333333e-06, "loss": 2.6059, "step": 1795 }, { "epoch": 17.96, "grad_norm": 44.60565948486328, "learning_rate": 4.04e-06, "loss": 3.0221, "step": 1796 }, { "epoch": 17.97, "grad_norm": 167.896240234375, "learning_rate": 4.036666666666667e-06, "loss": 3.3483, "step": 1797 }, { "epoch": 17.98, 
"grad_norm": 70.22254180908203, "learning_rate": 4.033333333333333e-06, "loss": 2.4624, "step": 1798 }, { "epoch": 17.99, "grad_norm": 57.89513397216797, "learning_rate": 4.03e-06, "loss": 3.2404, "step": 1799 }, { "epoch": 18.0, "grad_norm": 68.85315704345703, "learning_rate": 4.026666666666667e-06, "loss": 2.6387, "step": 1800 }, { "epoch": 18.0, "eval_loss": 2.8029069900512695, "eval_map": 0.0042, "eval_map_50": 0.0099, "eval_map_75": 0.0027, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0058, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.001, "eval_map_medium": 0.0075, "eval_map_neckline": 0.0117, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1273, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0265, "eval_map_small": 0.0042, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0062, "eval_mar_10": 0.0203, "eval_mar_100": 0.0258, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0347, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, 
"eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1063, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.4858, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4296, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0306, "eval_mar_medium": 0.0419, "eval_mar_small": 0.0206, "eval_model_preparation_time": 0.0124, "eval_runtime": 19.0417, "eval_samples_per_second": 5.252, "eval_steps_per_second": 1.313, "step": 1800 }, { "epoch": 18.01, "grad_norm": 50.13893508911133, "learning_rate": 4.0233333333333335e-06, "loss": 2.3471, "step": 1801 }, { "epoch": 18.02, "grad_norm": 104.28643798828125, "learning_rate": 4.0200000000000005e-06, "loss": 2.8839, "step": 1802 }, { "epoch": 18.03, "grad_norm": 430.1461486816406, "learning_rate": 4.0166666666666675e-06, "loss": 2.6168, "step": 1803 }, { "epoch": 18.04, "grad_norm": 91.08721160888672, "learning_rate": 4.013333333333334e-06, "loss": 2.4261, "step": 1804 }, { "epoch": 18.05, "grad_norm": 90.69356536865234, "learning_rate": 4.0100000000000006e-06, "loss": 2.4911, "step": 1805 }, { "epoch": 18.06, "grad_norm": 41.461116790771484, "learning_rate": 4.006666666666667e-06, "loss": 2.6966, "step": 1806 }, { "epoch": 18.07, "grad_norm": 36.372501373291016, "learning_rate": 4.003333333333334e-06, "loss": 2.929, "step": 1807 }, { "epoch": 18.08, "grad_norm": 
51.16105270385742, "learning_rate": 4.000000000000001e-06, "loss": 2.5421, "step": 1808 }, { "epoch": 18.09, "grad_norm": 44.19743728637695, "learning_rate": 3.996666666666667e-06, "loss": 2.6466, "step": 1809 }, { "epoch": 18.1, "grad_norm": 55.681785583496094, "learning_rate": 3.993333333333334e-06, "loss": 3.0745, "step": 1810 }, { "epoch": 18.11, "grad_norm": 58.444969177246094, "learning_rate": 3.990000000000001e-06, "loss": 3.011, "step": 1811 }, { "epoch": 18.12, "grad_norm": 27.419649124145508, "learning_rate": 3.986666666666667e-06, "loss": 2.9719, "step": 1812 }, { "epoch": 18.13, "grad_norm": 196.69216918945312, "learning_rate": 3.983333333333334e-06, "loss": 2.5985, "step": 1813 }, { "epoch": 18.14, "grad_norm": 37.669342041015625, "learning_rate": 3.980000000000001e-06, "loss": 2.9016, "step": 1814 }, { "epoch": 18.15, "grad_norm": 44.522281646728516, "learning_rate": 3.976666666666667e-06, "loss": 3.0593, "step": 1815 }, { "epoch": 18.16, "grad_norm": 79.32180786132812, "learning_rate": 3.973333333333333e-06, "loss": 2.1311, "step": 1816 }, { "epoch": 18.17, "grad_norm": 863.90576171875, "learning_rate": 3.97e-06, "loss": 3.4287, "step": 1817 }, { "epoch": 18.18, "grad_norm": 66.36158752441406, "learning_rate": 3.966666666666667e-06, "loss": 3.6109, "step": 1818 }, { "epoch": 18.19, "grad_norm": 44.607696533203125, "learning_rate": 3.963333333333333e-06, "loss": 2.8712, "step": 1819 }, { "epoch": 18.2, "grad_norm": 32.602603912353516, "learning_rate": 3.96e-06, "loss": 3.1118, "step": 1820 }, { "epoch": 18.21, "grad_norm": 75.27528381347656, "learning_rate": 3.956666666666667e-06, "loss": 3.2283, "step": 1821 }, { "epoch": 18.22, "grad_norm": 87.70584869384766, "learning_rate": 3.953333333333333e-06, "loss": 2.5718, "step": 1822 }, { "epoch": 18.23, "grad_norm": 46.57595443725586, "learning_rate": 3.95e-06, "loss": 2.745, "step": 1823 }, { "epoch": 18.24, "grad_norm": 43.127655029296875, "learning_rate": 3.946666666666667e-06, "loss": 2.4139, "step": 
1824 }, { "epoch": 18.25, "grad_norm": 194.99575805664062, "learning_rate": 3.943333333333333e-06, "loss": 3.0937, "step": 1825 }, { "epoch": 18.26, "grad_norm": 43.81917190551758, "learning_rate": 3.94e-06, "loss": 3.2886, "step": 1826 }, { "epoch": 18.27, "grad_norm": 196.8323516845703, "learning_rate": 3.936666666666667e-06, "loss": 2.6417, "step": 1827 }, { "epoch": 18.28, "grad_norm": 49.3838996887207, "learning_rate": 3.9333333333333335e-06, "loss": 3.2396, "step": 1828 }, { "epoch": 18.29, "grad_norm": 46.65165710449219, "learning_rate": 3.9300000000000005e-06, "loss": 2.2093, "step": 1829 }, { "epoch": 18.3, "grad_norm": 47.35091781616211, "learning_rate": 3.926666666666667e-06, "loss": 3.0971, "step": 1830 }, { "epoch": 18.31, "grad_norm": 166.96383666992188, "learning_rate": 3.923333333333334e-06, "loss": 2.4063, "step": 1831 }, { "epoch": 18.32, "grad_norm": 45.782981872558594, "learning_rate": 3.920000000000001e-06, "loss": 3.4939, "step": 1832 }, { "epoch": 18.33, "grad_norm": 31.944520950317383, "learning_rate": 3.916666666666667e-06, "loss": 2.6483, "step": 1833 }, { "epoch": 18.34, "grad_norm": 50.69828414916992, "learning_rate": 3.913333333333334e-06, "loss": 3.2261, "step": 1834 }, { "epoch": 18.35, "grad_norm": 47.217369079589844, "learning_rate": 3.910000000000001e-06, "loss": 3.184, "step": 1835 }, { "epoch": 18.36, "grad_norm": 54.45471954345703, "learning_rate": 3.906666666666667e-06, "loss": 3.2512, "step": 1836 }, { "epoch": 18.37, "grad_norm": 39.482582092285156, "learning_rate": 3.903333333333334e-06, "loss": 3.1171, "step": 1837 }, { "epoch": 18.38, "grad_norm": 333.3099060058594, "learning_rate": 3.900000000000001e-06, "loss": 3.2857, "step": 1838 }, { "epoch": 18.39, "grad_norm": 46.81970977783203, "learning_rate": 3.896666666666667e-06, "loss": 6.144, "step": 1839 }, { "epoch": 18.4, "grad_norm": 50.27552795410156, "learning_rate": 3.893333333333333e-06, "loss": 2.7579, "step": 1840 }, { "epoch": 18.41, "grad_norm": 
105.71690368652344, "learning_rate": 3.89e-06, "loss": 2.3265, "step": 1841 }, { "epoch": 18.42, "grad_norm": 642.1229858398438, "learning_rate": 3.886666666666667e-06, "loss": 2.928, "step": 1842 }, { "epoch": 18.43, "grad_norm": 33.14851379394531, "learning_rate": 3.883333333333333e-06, "loss": 3.4507, "step": 1843 }, { "epoch": 18.44, "grad_norm": 55.59370040893555, "learning_rate": 3.88e-06, "loss": 2.4596, "step": 1844 }, { "epoch": 18.45, "grad_norm": 35.93391799926758, "learning_rate": 3.876666666666667e-06, "loss": 3.1318, "step": 1845 }, { "epoch": 18.46, "grad_norm": 67.71118927001953, "learning_rate": 3.873333333333333e-06, "loss": 2.6289, "step": 1846 }, { "epoch": 18.47, "grad_norm": 49.7383918762207, "learning_rate": 3.87e-06, "loss": 3.3945, "step": 1847 }, { "epoch": 18.48, "grad_norm": 112.98335266113281, "learning_rate": 3.866666666666667e-06, "loss": 2.2984, "step": 1848 }, { "epoch": 18.49, "grad_norm": 87.41187286376953, "learning_rate": 3.863333333333333e-06, "loss": 2.4407, "step": 1849 }, { "epoch": 18.5, "grad_norm": 84.44141387939453, "learning_rate": 3.86e-06, "loss": 3.8495, "step": 1850 }, { "epoch": 18.5, "eval_loss": 2.7667477130889893, "eval_map": 0.0042, "eval_map_50": 0.0104, "eval_map_75": 0.0031, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0111, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0012, "eval_map_medium": 0.0081, "eval_map_neckline": 0.0126, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, 
"eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1227, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0262, "eval_map_small": 0.0039, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0066, "eval_mar_10": 0.0223, "eval_mar_100": 0.0277, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0551, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1206, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5104, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4513, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0311, "eval_mar_medium": 0.044, "eval_mar_small": 0.0223, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.3643, "eval_samples_per_second": 5.445, "eval_steps_per_second": 1.361, "step": 1850 }, { "epoch": 18.51, "grad_norm": 84.13770294189453, "learning_rate": 3.856666666666667e-06, 
"loss": 1.9746, "step": 1851 }, { "epoch": 18.52, "grad_norm": 114.1903305053711, "learning_rate": 3.853333333333334e-06, "loss": 3.2237, "step": 1852 }, { "epoch": 18.53, "grad_norm": 65.36096954345703, "learning_rate": 3.85e-06, "loss": 2.9233, "step": 1853 }, { "epoch": 18.54, "grad_norm": 29.578887939453125, "learning_rate": 3.8466666666666665e-06, "loss": 2.9004, "step": 1854 }, { "epoch": 18.55, "grad_norm": 86.3124008178711, "learning_rate": 3.8433333333333335e-06, "loss": 2.0119, "step": 1855 }, { "epoch": 18.56, "grad_norm": 130.24746704101562, "learning_rate": 3.8400000000000005e-06, "loss": 3.2803, "step": 1856 }, { "epoch": 18.57, "grad_norm": 31.429868698120117, "learning_rate": 3.836666666666667e-06, "loss": 5.9401, "step": 1857 }, { "epoch": 18.58, "grad_norm": 40.55575180053711, "learning_rate": 3.833333333333334e-06, "loss": 2.5883, "step": 1858 }, { "epoch": 18.59, "grad_norm": 56.65231704711914, "learning_rate": 3.830000000000001e-06, "loss": 2.9632, "step": 1859 }, { "epoch": 18.6, "grad_norm": 74.46727752685547, "learning_rate": 3.826666666666667e-06, "loss": 3.0582, "step": 1860 }, { "epoch": 18.61, "grad_norm": 111.60523223876953, "learning_rate": 3.823333333333334e-06, "loss": 3.952, "step": 1861 }, { "epoch": 18.62, "grad_norm": 392.1250915527344, "learning_rate": 3.820000000000001e-06, "loss": 2.9948, "step": 1862 }, { "epoch": 18.63, "grad_norm": 126.6686782836914, "learning_rate": 3.816666666666667e-06, "loss": 2.8078, "step": 1863 }, { "epoch": 18.64, "grad_norm": 67.25521087646484, "learning_rate": 3.813333333333334e-06, "loss": 2.5436, "step": 1864 }, { "epoch": 18.65, "grad_norm": 46.409454345703125, "learning_rate": 3.8100000000000004e-06, "loss": 2.708, "step": 1865 }, { "epoch": 18.66, "grad_norm": 119.85871887207031, "learning_rate": 3.806666666666667e-06, "loss": 3.3534, "step": 1866 }, { "epoch": 18.67, "grad_norm": 70.47321319580078, "learning_rate": 3.803333333333334e-06, "loss": 3.3954, "step": 1867 }, { "epoch": 18.68, 
"grad_norm": 75.99524688720703, "learning_rate": 3.8000000000000005e-06, "loss": 3.1256, "step": 1868 }, { "epoch": 18.69, "grad_norm": 43.585872650146484, "learning_rate": 3.796666666666667e-06, "loss": 2.4249, "step": 1869 }, { "epoch": 18.7, "grad_norm": 64.32867431640625, "learning_rate": 3.793333333333334e-06, "loss": 2.9328, "step": 1870 }, { "epoch": 18.71, "grad_norm": 27.589397430419922, "learning_rate": 3.79e-06, "loss": 2.8437, "step": 1871 }, { "epoch": 18.72, "grad_norm": 49.781349182128906, "learning_rate": 3.7866666666666667e-06, "loss": 2.938, "step": 1872 }, { "epoch": 18.73, "grad_norm": 37.41381072998047, "learning_rate": 3.7833333333333337e-06, "loss": 2.7135, "step": 1873 }, { "epoch": 18.74, "grad_norm": 96.55085754394531, "learning_rate": 3.7800000000000002e-06, "loss": 3.1215, "step": 1874 }, { "epoch": 18.75, "grad_norm": 51.05549240112305, "learning_rate": 3.776666666666667e-06, "loss": 2.7349, "step": 1875 }, { "epoch": 18.76, "grad_norm": 36.59074401855469, "learning_rate": 3.7733333333333338e-06, "loss": 2.1489, "step": 1876 }, { "epoch": 18.77, "grad_norm": 46.051448822021484, "learning_rate": 3.7700000000000003e-06, "loss": 2.4474, "step": 1877 }, { "epoch": 18.78, "grad_norm": 46.940670013427734, "learning_rate": 3.766666666666667e-06, "loss": 2.9995, "step": 1878 }, { "epoch": 18.79, "grad_norm": 161.09243774414062, "learning_rate": 3.763333333333334e-06, "loss": 2.577, "step": 1879 }, { "epoch": 18.8, "grad_norm": 43.94008255004883, "learning_rate": 3.7600000000000004e-06, "loss": 2.9555, "step": 1880 }, { "epoch": 18.81, "grad_norm": 32.72396469116211, "learning_rate": 3.756666666666667e-06, "loss": 4.2251, "step": 1881 }, { "epoch": 18.82, "grad_norm": 42.36888122558594, "learning_rate": 3.753333333333334e-06, "loss": 3.1126, "step": 1882 }, { "epoch": 18.83, "grad_norm": 262.6693420410156, "learning_rate": 3.7500000000000005e-06, "loss": 2.8873, "step": 1883 }, { "epoch": 18.84, "grad_norm": 79.21839141845703, "learning_rate": 
3.7466666666666667e-06, "loss": 3.1502, "step": 1884 }, { "epoch": 18.85, "grad_norm": 90.53414916992188, "learning_rate": 3.7433333333333336e-06, "loss": 3.463, "step": 1885 }, { "epoch": 18.86, "grad_norm": 43.574676513671875, "learning_rate": 3.74e-06, "loss": 2.2947, "step": 1886 }, { "epoch": 18.87, "grad_norm": 221.1454315185547, "learning_rate": 3.7366666666666667e-06, "loss": 2.1768, "step": 1887 }, { "epoch": 18.88, "grad_norm": 75.45902252197266, "learning_rate": 3.7333333333333337e-06, "loss": 2.6444, "step": 1888 }, { "epoch": 18.89, "grad_norm": 79.23594665527344, "learning_rate": 3.7300000000000003e-06, "loss": 2.8124, "step": 1889 }, { "epoch": 18.9, "grad_norm": 46.366485595703125, "learning_rate": 3.726666666666667e-06, "loss": 2.2763, "step": 1890 }, { "epoch": 18.91, "grad_norm": 41.26990509033203, "learning_rate": 3.723333333333334e-06, "loss": 2.5357, "step": 1891 }, { "epoch": 18.92, "grad_norm": 138.97540283203125, "learning_rate": 3.7200000000000004e-06, "loss": 2.7352, "step": 1892 }, { "epoch": 18.93, "grad_norm": 61.80579376220703, "learning_rate": 3.716666666666667e-06, "loss": 3.0298, "step": 1893 }, { "epoch": 18.94, "grad_norm": 47.6654052734375, "learning_rate": 3.713333333333334e-06, "loss": 3.0441, "step": 1894 }, { "epoch": 18.95, "grad_norm": 66.17101287841797, "learning_rate": 3.7100000000000005e-06, "loss": 2.2668, "step": 1895 }, { "epoch": 18.96, "grad_norm": 107.9349136352539, "learning_rate": 3.7066666666666666e-06, "loss": 2.8215, "step": 1896 }, { "epoch": 18.97, "grad_norm": 41.9974365234375, "learning_rate": 3.7033333333333336e-06, "loss": 2.7133, "step": 1897 }, { "epoch": 18.98, "grad_norm": 54.9355583190918, "learning_rate": 3.7e-06, "loss": 1.9666, "step": 1898 }, { "epoch": 18.99, "grad_norm": 65.89382934570312, "learning_rate": 3.6966666666666667e-06, "loss": 3.5599, "step": 1899 }, { "epoch": 19.0, "grad_norm": 24.43840980529785, "learning_rate": 3.6933333333333337e-06, "loss": 3.0583, "step": 1900 }, { "epoch": 
19.0, "eval_loss": 2.750284433364868, "eval_map": 0.0043, "eval_map_50": 0.0097, "eval_map_75": 0.0035, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0165, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0012, "eval_map_medium": 0.0076, "eval_map_neckline": 0.0136, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1205, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0275, "eval_map_small": 0.0042, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0076, "eval_mar_10": 0.0233, "eval_mar_100": 0.0291, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0837, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1476, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, 
"eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5097, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4513, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0321, "eval_mar_medium": 0.0467, "eval_mar_small": 0.0215, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.5659, "eval_samples_per_second": 5.386, "eval_steps_per_second": 1.347, "step": 1900 }, { "epoch": 19.01, "grad_norm": 76.54956817626953, "learning_rate": 3.6900000000000002e-06, "loss": 2.5332, "step": 1901 }, { "epoch": 19.02, "grad_norm": 108.57859802246094, "learning_rate": 3.686666666666667e-06, "loss": 2.7046, "step": 1902 }, { "epoch": 19.03, "grad_norm": 70.50140380859375, "learning_rate": 3.6833333333333338e-06, "loss": 3.1596, "step": 1903 }, { "epoch": 19.04, "grad_norm": 120.8152084350586, "learning_rate": 3.6800000000000003e-06, "loss": 2.8576, "step": 1904 }, { "epoch": 19.05, "grad_norm": 119.4213638305664, "learning_rate": 3.676666666666667e-06, "loss": 3.2785, "step": 1905 }, { "epoch": 19.06, "grad_norm": 997.3939208984375, "learning_rate": 3.673333333333334e-06, "loss": 2.9706, "step": 1906 }, { "epoch": 19.07, "grad_norm": 51.78649139404297, "learning_rate": 3.6700000000000004e-06, "loss": 3.0529, "step": 1907 }, { "epoch": 19.08, "grad_norm": 92.17501831054688, "learning_rate": 3.6666666666666666e-06, "loss": 2.9929, "step": 1908 }, { "epoch": 19.09, "grad_norm": 88.65013885498047, "learning_rate": 3.6633333333333336e-06, "loss": 2.8537, "step": 1909 }, { "epoch": 19.1, "grad_norm": 213.42947387695312, "learning_rate": 3.66e-06, "loss": 3.6314, "step": 1910 }, { "epoch": 19.11, 
"grad_norm": 41.40937805175781, "learning_rate": 3.6566666666666667e-06, "loss": 2.6919, "step": 1911 }, { "epoch": 19.12, "grad_norm": 44.80816650390625, "learning_rate": 3.6533333333333336e-06, "loss": 2.9933, "step": 1912 }, { "epoch": 19.13, "grad_norm": 43.62547302246094, "learning_rate": 3.65e-06, "loss": 2.9052, "step": 1913 }, { "epoch": 19.14, "grad_norm": 101.26763916015625, "learning_rate": 3.6466666666666668e-06, "loss": 2.6631, "step": 1914 }, { "epoch": 19.15, "grad_norm": 40.858360290527344, "learning_rate": 3.6433333333333337e-06, "loss": 2.6246, "step": 1915 }, { "epoch": 19.16, "grad_norm": 39.11173629760742, "learning_rate": 3.6400000000000003e-06, "loss": 2.8542, "step": 1916 }, { "epoch": 19.17, "grad_norm": 73.76329803466797, "learning_rate": 3.6366666666666673e-06, "loss": 3.0162, "step": 1917 }, { "epoch": 19.18, "grad_norm": 53.58188247680664, "learning_rate": 3.633333333333334e-06, "loss": 3.1628, "step": 1918 }, { "epoch": 19.19, "grad_norm": 46.95214080810547, "learning_rate": 3.6300000000000004e-06, "loss": 3.1871, "step": 1919 }, { "epoch": 19.2, "grad_norm": 35.897247314453125, "learning_rate": 3.6266666666666674e-06, "loss": 5.9187, "step": 1920 }, { "epoch": 19.21, "grad_norm": 141.66331481933594, "learning_rate": 3.6233333333333335e-06, "loss": 3.0658, "step": 1921 }, { "epoch": 19.22, "grad_norm": 95.42481231689453, "learning_rate": 3.62e-06, "loss": 2.2755, "step": 1922 }, { "epoch": 19.23, "grad_norm": 30.828433990478516, "learning_rate": 3.616666666666667e-06, "loss": 2.5458, "step": 1923 }, { "epoch": 19.24, "grad_norm": 31.567127227783203, "learning_rate": 3.6133333333333336e-06, "loss": 2.7222, "step": 1924 }, { "epoch": 19.25, "grad_norm": 51.88715362548828, "learning_rate": 3.61e-06, "loss": 2.4844, "step": 1925 }, { "epoch": 19.26, "grad_norm": 56.331329345703125, "learning_rate": 3.606666666666667e-06, "loss": 3.2547, "step": 1926 }, { "epoch": 19.27, "grad_norm": 41.5784797668457, "learning_rate": 
3.6033333333333337e-06, "loss": 3.1916, "step": 1927 }, { "epoch": 19.28, "grad_norm": 59.063804626464844, "learning_rate": 3.6000000000000003e-06, "loss": 2.4846, "step": 1928 }, { "epoch": 19.29, "grad_norm": 63.86281967163086, "learning_rate": 3.5966666666666672e-06, "loss": 2.8522, "step": 1929 }, { "epoch": 19.3, "grad_norm": 101.37970733642578, "learning_rate": 3.593333333333334e-06, "loss": 3.1175, "step": 1930 }, { "epoch": 19.31, "grad_norm": 47.87706756591797, "learning_rate": 3.5900000000000004e-06, "loss": 3.4745, "step": 1931 }, { "epoch": 19.32, "grad_norm": 32.51998519897461, "learning_rate": 3.5866666666666673e-06, "loss": 2.5392, "step": 1932 }, { "epoch": 19.33, "grad_norm": 54.31663131713867, "learning_rate": 3.5833333333333335e-06, "loss": 2.7413, "step": 1933 }, { "epoch": 19.34, "grad_norm": 78.005615234375, "learning_rate": 3.58e-06, "loss": 3.2071, "step": 1934 }, { "epoch": 19.35, "grad_norm": 40.06574630737305, "learning_rate": 3.576666666666667e-06, "loss": 2.9255, "step": 1935 }, { "epoch": 19.36, "grad_norm": 61.69606399536133, "learning_rate": 3.5733333333333336e-06, "loss": 5.824, "step": 1936 }, { "epoch": 19.37, "grad_norm": 30.10831069946289, "learning_rate": 3.57e-06, "loss": 2.3842, "step": 1937 }, { "epoch": 19.38, "grad_norm": 43.41775894165039, "learning_rate": 3.566666666666667e-06, "loss": 3.1785, "step": 1938 }, { "epoch": 19.39, "grad_norm": 123.57652282714844, "learning_rate": 3.5633333333333337e-06, "loss": 2.8497, "step": 1939 }, { "epoch": 19.4, "grad_norm": 34.72128677368164, "learning_rate": 3.5600000000000002e-06, "loss": 2.8647, "step": 1940 }, { "epoch": 19.41, "grad_norm": 47.99922561645508, "learning_rate": 3.556666666666667e-06, "loss": 2.8552, "step": 1941 }, { "epoch": 19.42, "grad_norm": 41.828304290771484, "learning_rate": 3.5533333333333338e-06, "loss": 2.385, "step": 1942 }, { "epoch": 19.43, "grad_norm": 71.26515197753906, "learning_rate": 3.5500000000000003e-06, "loss": 2.7754, "step": 1943 }, { 
"epoch": 19.44, "grad_norm": 419.7491149902344, "learning_rate": 3.5466666666666673e-06, "loss": 2.6582, "step": 1944 }, { "epoch": 19.45, "grad_norm": 41.7657470703125, "learning_rate": 3.5433333333333334e-06, "loss": 2.5401, "step": 1945 }, { "epoch": 19.46, "grad_norm": 88.21214294433594, "learning_rate": 3.54e-06, "loss": 3.4658, "step": 1946 }, { "epoch": 19.47, "grad_norm": 64.97106170654297, "learning_rate": 3.536666666666667e-06, "loss": 2.7523, "step": 1947 }, { "epoch": 19.48, "grad_norm": 91.06184387207031, "learning_rate": 3.5333333333333335e-06, "loss": 2.3376, "step": 1948 }, { "epoch": 19.49, "grad_norm": 45.835567474365234, "learning_rate": 3.53e-06, "loss": 2.8756, "step": 1949 }, { "epoch": 19.5, "grad_norm": 192.08363342285156, "learning_rate": 3.526666666666667e-06, "loss": 2.8411, "step": 1950 }, { "epoch": 19.5, "eval_loss": 2.7440285682678223, "eval_map": 0.0043, "eval_map_50": 0.0095, "eval_map_75": 0.0036, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0144, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0012, "eval_map_medium": 0.0078, "eval_map_neckline": 0.011, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1194, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0298, "eval_map_small": 0.0039, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, 
"eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0079, "eval_mar_10": 0.0232, "eval_mar_100": 0.0288, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0653, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1302, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5209, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4661, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0345, "eval_mar_medium": 0.0454, "eval_mar_small": 0.0227, "eval_model_preparation_time": 0.0124, "eval_runtime": 19.0202, "eval_samples_per_second": 5.258, "eval_steps_per_second": 1.314, "step": 1950 }, { "epoch": 19.51, "grad_norm": 34.67411804199219, "learning_rate": 3.5233333333333336e-06, "loss": 2.4438, "step": 1951 }, { "epoch": 19.52, "grad_norm": 73.698974609375, "learning_rate": 3.52e-06, "loss": 2.3084, "step": 1952 }, { "epoch": 19.53, "grad_norm": 46.25621795654297, "learning_rate": 3.516666666666667e-06, "loss": 2.6092, "step": 1953 }, { "epoch": 19.54, "grad_norm": 
59.446895599365234, "learning_rate": 3.5133333333333337e-06, "loss": 2.5967, "step": 1954 }, { "epoch": 19.55, "grad_norm": 47.835025787353516, "learning_rate": 3.5100000000000003e-06, "loss": 2.8565, "step": 1955 }, { "epoch": 19.56, "grad_norm": 67.83891296386719, "learning_rate": 3.5066666666666673e-06, "loss": 3.817, "step": 1956 }, { "epoch": 19.57, "grad_norm": 67.26541137695312, "learning_rate": 3.5033333333333334e-06, "loss": 3.4107, "step": 1957 }, { "epoch": 19.58, "grad_norm": 55.3552360534668, "learning_rate": 3.5e-06, "loss": 2.8221, "step": 1958 }, { "epoch": 19.59, "grad_norm": 56.430850982666016, "learning_rate": 3.496666666666667e-06, "loss": 3.0301, "step": 1959 }, { "epoch": 19.6, "grad_norm": 40.6735725402832, "learning_rate": 3.4933333333333335e-06, "loss": 2.1538, "step": 1960 }, { "epoch": 19.61, "grad_norm": 59.63552474975586, "learning_rate": 3.49e-06, "loss": 2.5682, "step": 1961 }, { "epoch": 19.62, "grad_norm": 36.54388427734375, "learning_rate": 3.486666666666667e-06, "loss": 2.5509, "step": 1962 }, { "epoch": 19.63, "grad_norm": 69.45423126220703, "learning_rate": 3.4833333333333336e-06, "loss": 2.7445, "step": 1963 }, { "epoch": 19.64, "grad_norm": 40.71404266357422, "learning_rate": 3.48e-06, "loss": 3.2094, "step": 1964 }, { "epoch": 19.65, "grad_norm": 49.416439056396484, "learning_rate": 3.476666666666667e-06, "loss": 3.0673, "step": 1965 }, { "epoch": 19.66, "grad_norm": 214.6049041748047, "learning_rate": 3.4733333333333337e-06, "loss": 3.4015, "step": 1966 }, { "epoch": 19.67, "grad_norm": 36.940208435058594, "learning_rate": 3.4700000000000002e-06, "loss": 2.7075, "step": 1967 }, { "epoch": 19.68, "grad_norm": 47.71488571166992, "learning_rate": 3.4666666666666672e-06, "loss": 3.5347, "step": 1968 }, { "epoch": 19.69, "grad_norm": 35.63603973388672, "learning_rate": 3.4633333333333333e-06, "loss": 3.1419, "step": 1969 }, { "epoch": 19.7, "grad_norm": 215.12686157226562, "learning_rate": 3.46e-06, "loss": 2.1993, "step": 1970 
}, { "epoch": 19.71, "grad_norm": 222.8508758544922, "learning_rate": 3.456666666666667e-06, "loss": 2.9336, "step": 1971 }, { "epoch": 19.72, "grad_norm": 61.692405700683594, "learning_rate": 3.4533333333333334e-06, "loss": 2.8853, "step": 1972 }, { "epoch": 19.73, "grad_norm": 67.8251953125, "learning_rate": 3.45e-06, "loss": 2.5798, "step": 1973 }, { "epoch": 19.74, "grad_norm": 40.691383361816406, "learning_rate": 3.446666666666667e-06, "loss": 2.5401, "step": 1974 }, { "epoch": 19.75, "grad_norm": 58.84613800048828, "learning_rate": 3.4433333333333335e-06, "loss": 2.707, "step": 1975 }, { "epoch": 19.76, "grad_norm": 64.07059478759766, "learning_rate": 3.44e-06, "loss": 2.9319, "step": 1976 }, { "epoch": 19.77, "grad_norm": 75.66726684570312, "learning_rate": 3.436666666666667e-06, "loss": 2.7461, "step": 1977 }, { "epoch": 19.78, "grad_norm": 47.05839157104492, "learning_rate": 3.4333333333333336e-06, "loss": 3.214, "step": 1978 }, { "epoch": 19.79, "grad_norm": 98.16157531738281, "learning_rate": 3.4300000000000006e-06, "loss": 2.6192, "step": 1979 }, { "epoch": 19.8, "grad_norm": 103.96009063720703, "learning_rate": 3.426666666666667e-06, "loss": 3.2303, "step": 1980 }, { "epoch": 19.81, "grad_norm": 49.17841720581055, "learning_rate": 3.4233333333333333e-06, "loss": 3.2623, "step": 1981 }, { "epoch": 19.82, "grad_norm": 50.526309967041016, "learning_rate": 3.4200000000000007e-06, "loss": 3.5103, "step": 1982 }, { "epoch": 19.83, "grad_norm": 33.91248321533203, "learning_rate": 3.416666666666667e-06, "loss": 3.056, "step": 1983 }, { "epoch": 19.84, "grad_norm": 42.89253234863281, "learning_rate": 3.4133333333333334e-06, "loss": 2.0918, "step": 1984 }, { "epoch": 19.85, "grad_norm": 40.185882568359375, "learning_rate": 3.4100000000000004e-06, "loss": 3.322, "step": 1985 }, { "epoch": 19.86, "grad_norm": 125.57353210449219, "learning_rate": 3.406666666666667e-06, "loss": 3.1015, "step": 1986 }, { "epoch": 19.87, "grad_norm": 58.22526931762695, 
"learning_rate": 3.4033333333333335e-06, "loss": 3.098, "step": 1987 }, { "epoch": 19.88, "grad_norm": 135.90789794921875, "learning_rate": 3.4000000000000005e-06, "loss": 2.2548, "step": 1988 }, { "epoch": 19.89, "grad_norm": 34.9175910949707, "learning_rate": 3.396666666666667e-06, "loss": 3.233, "step": 1989 }, { "epoch": 19.9, "grad_norm": 47.39383316040039, "learning_rate": 3.3933333333333336e-06, "loss": 3.4337, "step": 1990 }, { "epoch": 19.91, "grad_norm": 80.74998474121094, "learning_rate": 3.3900000000000006e-06, "loss": 2.8075, "step": 1991 }, { "epoch": 19.92, "grad_norm": 59.27268600463867, "learning_rate": 3.386666666666667e-06, "loss": 2.8768, "step": 1992 }, { "epoch": 19.93, "grad_norm": 131.74871826171875, "learning_rate": 3.3833333333333333e-06, "loss": 2.9927, "step": 1993 }, { "epoch": 19.94, "grad_norm": 168.75839233398438, "learning_rate": 3.3800000000000007e-06, "loss": 1.8935, "step": 1994 }, { "epoch": 19.95, "grad_norm": 100.0290756225586, "learning_rate": 3.376666666666667e-06, "loss": 3.2371, "step": 1995 }, { "epoch": 19.96, "grad_norm": 233.23463439941406, "learning_rate": 3.3733333333333334e-06, "loss": 2.0646, "step": 1996 }, { "epoch": 19.97, "grad_norm": 48.90861511230469, "learning_rate": 3.3700000000000003e-06, "loss": 2.9273, "step": 1997 }, { "epoch": 19.98, "grad_norm": 45.16669464111328, "learning_rate": 3.366666666666667e-06, "loss": 2.2915, "step": 1998 }, { "epoch": 19.99, "grad_norm": 97.93034362792969, "learning_rate": 3.3633333333333335e-06, "loss": 2.6575, "step": 1999 }, { "epoch": 20.0, "grad_norm": 55.85203552246094, "learning_rate": 3.3600000000000004e-06, "loss": 3.0183, "step": 2000 }, { "epoch": 20.0, "eval_loss": 2.7457964420318604, "eval_map": 0.0042, "eval_map_50": 0.0098, "eval_map_75": 0.0032, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, 
"eval_map_dress": 0.0227, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0018, "eval_map_medium": 0.0081, "eval_map_neckline": 0.0152, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1074, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0286, "eval_map_small": 0.0035, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0089, "eval_mar_10": 0.0241, "eval_mar_100": 0.0296, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.102, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1619, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.4955, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4539, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 
0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0375, "eval_mar_medium": 0.046, "eval_mar_small": 0.0218, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.8444, "eval_samples_per_second": 5.307, "eval_steps_per_second": 1.327, "step": 2000 }, { "epoch": 20.01, "grad_norm": 38.590824127197266, "learning_rate": 3.356666666666667e-06, "loss": 2.8447, "step": 2001 }, { "epoch": 20.02, "grad_norm": 50.8958740234375, "learning_rate": 3.3533333333333336e-06, "loss": 2.6237, "step": 2002 }, { "epoch": 20.03, "grad_norm": 58.47087478637695, "learning_rate": 3.3500000000000005e-06, "loss": 3.2897, "step": 2003 }, { "epoch": 20.04, "grad_norm": 53.43913650512695, "learning_rate": 3.346666666666667e-06, "loss": 2.7751, "step": 2004 }, { "epoch": 20.05, "grad_norm": 196.89231872558594, "learning_rate": 3.3433333333333332e-06, "loss": 3.0561, "step": 2005 }, { "epoch": 20.06, "grad_norm": 130.00717163085938, "learning_rate": 3.3400000000000006e-06, "loss": 2.8063, "step": 2006 }, { "epoch": 20.07, "grad_norm": 106.51069641113281, "learning_rate": 3.3366666666666668e-06, "loss": 2.6293, "step": 2007 }, { "epoch": 20.08, "grad_norm": 125.53428649902344, "learning_rate": 3.3333333333333333e-06, "loss": 3.1819, "step": 2008 }, { "epoch": 20.09, "grad_norm": 64.19740295410156, "learning_rate": 3.3300000000000003e-06, "loss": 2.793, "step": 2009 }, { "epoch": 20.1, "grad_norm": 52.832698822021484, "learning_rate": 3.326666666666667e-06, "loss": 2.2103, "step": 2010 }, { "epoch": 20.11, "grad_norm": 40.959476470947266, "learning_rate": 3.3233333333333334e-06, "loss": 3.2195, "step": 2011 }, { "epoch": 20.12, "grad_norm": 197.837158203125, "learning_rate": 3.3200000000000004e-06, "loss": 2.5704, "step": 2012 }, { "epoch": 20.13, "grad_norm": 33.84355163574219, "learning_rate": 3.316666666666667e-06, "loss": 
3.413, "step": 2013 }, { "epoch": 20.14, "grad_norm": 60.49812316894531, "learning_rate": 3.3133333333333335e-06, "loss": 2.4694, "step": 2014 }, { "epoch": 20.15, "grad_norm": 61.72014617919922, "learning_rate": 3.3100000000000005e-06, "loss": 2.8796, "step": 2015 }, { "epoch": 20.16, "grad_norm": 67.2596664428711, "learning_rate": 3.306666666666667e-06, "loss": 2.4246, "step": 2016 }, { "epoch": 20.17, "grad_norm": 76.40853118896484, "learning_rate": 3.303333333333333e-06, "loss": 2.33, "step": 2017 }, { "epoch": 20.18, "grad_norm": 103.62599182128906, "learning_rate": 3.3000000000000006e-06, "loss": 3.1552, "step": 2018 }, { "epoch": 20.19, "grad_norm": 22.833663940429688, "learning_rate": 3.2966666666666667e-06, "loss": 2.444, "step": 2019 }, { "epoch": 20.2, "grad_norm": 116.46928405761719, "learning_rate": 3.2933333333333333e-06, "loss": 2.4969, "step": 2020 }, { "epoch": 20.21, "grad_norm": 68.82296752929688, "learning_rate": 3.2900000000000003e-06, "loss": 2.1506, "step": 2021 }, { "epoch": 20.22, "grad_norm": 38.61397171020508, "learning_rate": 3.286666666666667e-06, "loss": 2.8245, "step": 2022 }, { "epoch": 20.23, "grad_norm": 47.43750762939453, "learning_rate": 3.2833333333333334e-06, "loss": 2.9443, "step": 2023 }, { "epoch": 20.24, "grad_norm": 93.92191314697266, "learning_rate": 3.2800000000000004e-06, "loss": 2.6209, "step": 2024 }, { "epoch": 20.25, "grad_norm": 128.9422607421875, "learning_rate": 3.276666666666667e-06, "loss": 2.914, "step": 2025 }, { "epoch": 20.26, "grad_norm": 33.89276123046875, "learning_rate": 3.2733333333333335e-06, "loss": 5.8168, "step": 2026 }, { "epoch": 20.27, "grad_norm": 68.74068450927734, "learning_rate": 3.2700000000000005e-06, "loss": 2.7356, "step": 2027 }, { "epoch": 20.28, "grad_norm": 181.96603393554688, "learning_rate": 3.266666666666667e-06, "loss": 2.8911, "step": 2028 }, { "epoch": 20.29, "grad_norm": 27.693937301635742, "learning_rate": 3.263333333333333e-06, "loss": 2.8202, "step": 2029 }, { "epoch": 
20.3, "grad_norm": 91.70649719238281, "learning_rate": 3.2600000000000006e-06, "loss": 3.2245, "step": 2030 }, { "epoch": 20.31, "grad_norm": 49.91205978393555, "learning_rate": 3.2566666666666667e-06, "loss": 3.1032, "step": 2031 }, { "epoch": 20.32, "grad_norm": 81.2463150024414, "learning_rate": 3.2533333333333332e-06, "loss": 2.6671, "step": 2032 }, { "epoch": 20.33, "grad_norm": 41.73638916015625, "learning_rate": 3.2500000000000002e-06, "loss": 3.1657, "step": 2033 }, { "epoch": 20.34, "grad_norm": 123.677001953125, "learning_rate": 3.2466666666666668e-06, "loss": 2.5814, "step": 2034 }, { "epoch": 20.35, "grad_norm": 232.6008758544922, "learning_rate": 3.2433333333333333e-06, "loss": 2.6166, "step": 2035 }, { "epoch": 20.36, "grad_norm": 107.93050384521484, "learning_rate": 3.2400000000000003e-06, "loss": 2.9166, "step": 2036 }, { "epoch": 20.37, "grad_norm": 49.65397262573242, "learning_rate": 3.236666666666667e-06, "loss": 2.3576, "step": 2037 }, { "epoch": 20.38, "grad_norm": 299.16455078125, "learning_rate": 3.2333333333333334e-06, "loss": 2.6667, "step": 2038 }, { "epoch": 20.39, "grad_norm": 96.66838836669922, "learning_rate": 3.2300000000000004e-06, "loss": 2.6752, "step": 2039 }, { "epoch": 20.4, "grad_norm": 113.51480865478516, "learning_rate": 3.226666666666667e-06, "loss": 3.1808, "step": 2040 }, { "epoch": 20.41, "grad_norm": 45.77329635620117, "learning_rate": 3.223333333333334e-06, "loss": 2.777, "step": 2041 }, { "epoch": 20.42, "grad_norm": 56.489627838134766, "learning_rate": 3.2200000000000005e-06, "loss": 3.4942, "step": 2042 }, { "epoch": 20.43, "grad_norm": 42.5595703125, "learning_rate": 3.2166666666666666e-06, "loss": 3.1934, "step": 2043 }, { "epoch": 20.44, "grad_norm": 27.32871437072754, "learning_rate": 3.213333333333334e-06, "loss": 2.4832, "step": 2044 }, { "epoch": 20.45, "grad_norm": 55.79988479614258, "learning_rate": 3.21e-06, "loss": 2.7771, "step": 2045 }, { "epoch": 20.46, "grad_norm": 48.490821838378906, "learning_rate": 
3.2066666666666667e-06, "loss": 3.5693, "step": 2046 }, { "epoch": 20.47, "grad_norm": 121.02519989013672, "learning_rate": 3.2033333333333337e-06, "loss": 3.9582, "step": 2047 }, { "epoch": 20.48, "grad_norm": 57.3109016418457, "learning_rate": 3.2000000000000003e-06, "loss": 3.2487, "step": 2048 }, { "epoch": 20.49, "grad_norm": 40.64695739746094, "learning_rate": 3.196666666666667e-06, "loss": 5.7925, "step": 2049 }, { "epoch": 20.5, "grad_norm": 119.5406723022461, "learning_rate": 3.193333333333334e-06, "loss": 2.8988, "step": 2050 }, { "epoch": 20.5, "eval_loss": 2.7381792068481445, "eval_map": 0.0048, "eval_map_50": 0.0111, "eval_map_75": 0.0038, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0345, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0022, "eval_map_medium": 0.0085, "eval_map_neckline": 0.0158, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1163, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0296, "eval_map_small": 0.0038, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0089, "eval_mar_10": 0.0255, "eval_mar_100": 0.0308, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 
0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.1245, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1476, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5336, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4557, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0365, "eval_mar_medium": 0.0451, "eval_mar_small": 0.0241, "eval_model_preparation_time": 0.0124, "eval_runtime": 19.7379, "eval_samples_per_second": 5.066, "eval_steps_per_second": 1.267, "step": 2050 }, { "epoch": 20.51, "grad_norm": 42.2684326171875, "learning_rate": 3.1900000000000004e-06, "loss": 3.179, "step": 2051 }, { "epoch": 20.52, "grad_norm": 57.62427520751953, "learning_rate": 3.186666666666667e-06, "loss": 2.9704, "step": 2052 }, { "epoch": 20.53, "grad_norm": 33.44970703125, "learning_rate": 3.183333333333334e-06, "loss": 3.5574, "step": 2053 }, { "epoch": 20.54, "grad_norm": 85.60649871826172, "learning_rate": 3.1800000000000005e-06, "loss": 2.6897, "step": 2054 }, { "epoch": 20.55, "grad_norm": 33.25408935546875, "learning_rate": 3.1766666666666666e-06, "loss": 2.7847, "step": 2055 }, { "epoch": 20.56, "grad_norm": 32.536983489990234, "learning_rate": 
3.173333333333334e-06, "loss": 3.0236, "step": 2056 }, { "epoch": 20.57, "grad_norm": 90.54663848876953, "learning_rate": 3.17e-06, "loss": 2.1888, "step": 2057 }, { "epoch": 20.58, "grad_norm": 48.39051055908203, "learning_rate": 3.1666666666666667e-06, "loss": 2.6597, "step": 2058 }, { "epoch": 20.59, "grad_norm": 57.217586517333984, "learning_rate": 3.1633333333333337e-06, "loss": 3.1605, "step": 2059 }, { "epoch": 20.6, "grad_norm": 102.51802825927734, "learning_rate": 3.1600000000000002e-06, "loss": 2.4565, "step": 2060 }, { "epoch": 20.61, "grad_norm": 49.9645881652832, "learning_rate": 3.156666666666667e-06, "loss": 3.2402, "step": 2061 }, { "epoch": 20.62, "grad_norm": 80.11786651611328, "learning_rate": 3.1533333333333338e-06, "loss": 3.536, "step": 2062 }, { "epoch": 20.63, "grad_norm": 194.35218811035156, "learning_rate": 3.1500000000000003e-06, "loss": 2.3743, "step": 2063 }, { "epoch": 20.64, "grad_norm": 36.0657844543457, "learning_rate": 3.146666666666667e-06, "loss": 3.2052, "step": 2064 }, { "epoch": 20.65, "grad_norm": 52.436946868896484, "learning_rate": 3.143333333333334e-06, "loss": 2.3007, "step": 2065 }, { "epoch": 20.66, "grad_norm": 101.19681549072266, "learning_rate": 3.1400000000000004e-06, "loss": 3.2854, "step": 2066 }, { "epoch": 20.67, "grad_norm": 51.75736999511719, "learning_rate": 3.1366666666666666e-06, "loss": 2.4653, "step": 2067 }, { "epoch": 20.68, "grad_norm": 73.16336059570312, "learning_rate": 3.133333333333334e-06, "loss": 2.9278, "step": 2068 }, { "epoch": 20.69, "grad_norm": 77.24678039550781, "learning_rate": 3.13e-06, "loss": 2.4176, "step": 2069 }, { "epoch": 20.7, "grad_norm": 86.9086685180664, "learning_rate": 3.1266666666666667e-06, "loss": 3.3695, "step": 2070 }, { "epoch": 20.71, "grad_norm": 53.7149543762207, "learning_rate": 3.1233333333333336e-06, "loss": 3.4268, "step": 2071 }, { "epoch": 20.72, "grad_norm": 73.05108642578125, "learning_rate": 3.12e-06, "loss": 2.8737, "step": 2072 }, { "epoch": 20.73, 
"grad_norm": 39.79237365722656, "learning_rate": 3.1166666666666668e-06, "loss": 2.4154, "step": 2073 }, { "epoch": 20.74, "grad_norm": 41.09585952758789, "learning_rate": 3.1133333333333337e-06, "loss": 2.6898, "step": 2074 }, { "epoch": 20.75, "grad_norm": 109.62870025634766, "learning_rate": 3.1100000000000003e-06, "loss": 2.2744, "step": 2075 }, { "epoch": 20.76, "grad_norm": 214.67706298828125, "learning_rate": 3.106666666666667e-06, "loss": 2.416, "step": 2076 }, { "epoch": 20.77, "grad_norm": 60.70411682128906, "learning_rate": 3.103333333333334e-06, "loss": 3.224, "step": 2077 }, { "epoch": 20.78, "grad_norm": 90.41375732421875, "learning_rate": 3.1000000000000004e-06, "loss": 3.3014, "step": 2078 }, { "epoch": 20.79, "grad_norm": 40.7693977355957, "learning_rate": 3.0966666666666665e-06, "loss": 3.0433, "step": 2079 }, { "epoch": 20.8, "grad_norm": 41.670684814453125, "learning_rate": 3.093333333333334e-06, "loss": 2.3848, "step": 2080 }, { "epoch": 20.81, "grad_norm": 46.19411087036133, "learning_rate": 3.09e-06, "loss": 2.623, "step": 2081 }, { "epoch": 20.82, "grad_norm": 107.92227935791016, "learning_rate": 3.0866666666666666e-06, "loss": 2.5102, "step": 2082 }, { "epoch": 20.83, "grad_norm": 38.268524169921875, "learning_rate": 3.0833333333333336e-06, "loss": 2.6666, "step": 2083 }, { "epoch": 20.84, "grad_norm": 128.63394165039062, "learning_rate": 3.08e-06, "loss": 3.1269, "step": 2084 }, { "epoch": 20.85, "grad_norm": 38.91095733642578, "learning_rate": 3.0766666666666667e-06, "loss": 2.9439, "step": 2085 }, { "epoch": 20.86, "grad_norm": 66.47561645507812, "learning_rate": 3.0733333333333337e-06, "loss": 2.285, "step": 2086 }, { "epoch": 20.87, "grad_norm": 48.57324981689453, "learning_rate": 3.0700000000000003e-06, "loss": 3.8756, "step": 2087 }, { "epoch": 20.88, "grad_norm": 72.58589935302734, "learning_rate": 3.066666666666667e-06, "loss": 2.6015, "step": 2088 }, { "epoch": 20.89, "grad_norm": 40.020687103271484, "learning_rate": 
3.063333333333334e-06, "loss": 2.6126, "step": 2089 }, { "epoch": 20.9, "grad_norm": 49.53977584838867, "learning_rate": 3.0600000000000003e-06, "loss": 3.4893, "step": 2090 }, { "epoch": 20.91, "grad_norm": 66.86822509765625, "learning_rate": 3.0566666666666665e-06, "loss": 3.807, "step": 2091 }, { "epoch": 20.92, "grad_norm": 174.8988800048828, "learning_rate": 3.053333333333334e-06, "loss": 2.061, "step": 2092 }, { "epoch": 20.93, "grad_norm": 50.739830017089844, "learning_rate": 3.05e-06, "loss": 2.4007, "step": 2093 }, { "epoch": 20.94, "grad_norm": 119.03192901611328, "learning_rate": 3.0466666666666666e-06, "loss": 2.2858, "step": 2094 }, { "epoch": 20.95, "grad_norm": 27.04880142211914, "learning_rate": 3.0433333333333336e-06, "loss": 2.6345, "step": 2095 }, { "epoch": 20.96, "grad_norm": 39.64505386352539, "learning_rate": 3.04e-06, "loss": 2.2372, "step": 2096 }, { "epoch": 20.97, "grad_norm": 93.53118133544922, "learning_rate": 3.0366666666666667e-06, "loss": 3.0094, "step": 2097 }, { "epoch": 20.98, "grad_norm": 47.752925872802734, "learning_rate": 3.0333333333333337e-06, "loss": 3.1057, "step": 2098 }, { "epoch": 20.99, "grad_norm": 44.73044204711914, "learning_rate": 3.0300000000000002e-06, "loss": 2.8122, "step": 2099 }, { "epoch": 21.0, "grad_norm": 72.71589660644531, "learning_rate": 3.0266666666666668e-06, "loss": 2.5286, "step": 2100 }, { "epoch": 21.0, "eval_loss": 2.738297700881958, "eval_map": 0.0048, "eval_map_50": 0.0108, "eval_map_75": 0.0038, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0298, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, 
"eval_map_lapel": 0.0, "eval_map_large": 0.0036, "eval_map_medium": 0.0081, "eval_map_neckline": 0.0153, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1206, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0294, "eval_map_small": 0.0042, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0083, "eval_mar_10": 0.0257, "eval_mar_100": 0.0309, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.0959, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1873, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5366, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4461, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0339, "eval_mar_medium": 0.0483, "eval_mar_small": 
0.0248, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.8643, "eval_samples_per_second": 5.301, "eval_steps_per_second": 1.325, "step": 2100 }, { "epoch": 21.01, "grad_norm": 300.561279296875, "learning_rate": 3.0233333333333338e-06, "loss": 2.6516, "step": 2101 }, { "epoch": 21.02, "grad_norm": 91.05181121826172, "learning_rate": 3.0200000000000003e-06, "loss": 2.9234, "step": 2102 }, { "epoch": 21.03, "grad_norm": 60.55237579345703, "learning_rate": 3.0166666666666673e-06, "loss": 2.2894, "step": 2103 }, { "epoch": 21.04, "grad_norm": 43.9532585144043, "learning_rate": 3.013333333333334e-06, "loss": 2.9779, "step": 2104 }, { "epoch": 21.05, "grad_norm": 125.84983825683594, "learning_rate": 3.01e-06, "loss": 2.8716, "step": 2105 }, { "epoch": 21.06, "grad_norm": 44.992061614990234, "learning_rate": 3.0066666666666674e-06, "loss": 2.0411, "step": 2106 }, { "epoch": 21.07, "grad_norm": 138.88075256347656, "learning_rate": 3.0033333333333335e-06, "loss": 2.5918, "step": 2107 }, { "epoch": 21.08, "grad_norm": 66.44072723388672, "learning_rate": 3e-06, "loss": 3.1434, "step": 2108 }, { "epoch": 21.09, "grad_norm": 75.968994140625, "learning_rate": 2.996666666666667e-06, "loss": 2.7712, "step": 2109 }, { "epoch": 21.1, "grad_norm": 88.04601287841797, "learning_rate": 2.9933333333333336e-06, "loss": 2.5249, "step": 2110 }, { "epoch": 21.11, "grad_norm": 73.4135513305664, "learning_rate": 2.99e-06, "loss": 2.7843, "step": 2111 }, { "epoch": 21.12, "grad_norm": 91.49766540527344, "learning_rate": 2.986666666666667e-06, "loss": 3.332, "step": 2112 }, { "epoch": 21.13, "grad_norm": 29.42135238647461, "learning_rate": 2.9833333333333337e-06, "loss": 3.2924, "step": 2113 }, { "epoch": 21.14, "grad_norm": 58.587982177734375, "learning_rate": 2.9800000000000003e-06, "loss": 3.088, "step": 2114 }, { "epoch": 21.15, "grad_norm": 54.041446685791016, "learning_rate": 2.9766666666666672e-06, "loss": 3.0814, "step": 2115 }, { "epoch": 21.16, "grad_norm": 47.823612213134766, 
"learning_rate": 2.973333333333334e-06, "loss": 2.7089, "step": 2116 }, { "epoch": 21.17, "grad_norm": 55.94010925292969, "learning_rate": 2.97e-06, "loss": 3.1686, "step": 2117 }, { "epoch": 21.18, "grad_norm": 53.36690139770508, "learning_rate": 2.9666666666666673e-06, "loss": 3.1696, "step": 2118 }, { "epoch": 21.19, "grad_norm": 63.6583137512207, "learning_rate": 2.9633333333333335e-06, "loss": 2.0478, "step": 2119 }, { "epoch": 21.2, "grad_norm": 100.03040313720703, "learning_rate": 2.96e-06, "loss": 2.9016, "step": 2120 }, { "epoch": 21.21, "grad_norm": 61.008174896240234, "learning_rate": 2.956666666666667e-06, "loss": 2.8446, "step": 2121 }, { "epoch": 21.22, "grad_norm": 46.60892868041992, "learning_rate": 2.9533333333333336e-06, "loss": 2.7311, "step": 2122 }, { "epoch": 21.23, "grad_norm": 52.025691986083984, "learning_rate": 2.95e-06, "loss": 2.5881, "step": 2123 }, { "epoch": 21.24, "grad_norm": 338.8430480957031, "learning_rate": 2.946666666666667e-06, "loss": 2.6687, "step": 2124 }, { "epoch": 21.25, "grad_norm": 37.10315704345703, "learning_rate": 2.9433333333333337e-06, "loss": 2.5121, "step": 2125 }, { "epoch": 21.26, "grad_norm": 62.27751541137695, "learning_rate": 2.9400000000000002e-06, "loss": 5.9043, "step": 2126 }, { "epoch": 21.27, "grad_norm": 159.8858184814453, "learning_rate": 2.936666666666667e-06, "loss": 1.8354, "step": 2127 }, { "epoch": 21.28, "grad_norm": 52.21768569946289, "learning_rate": 2.9333333333333338e-06, "loss": 2.7724, "step": 2128 }, { "epoch": 21.29, "grad_norm": 253.92062377929688, "learning_rate": 2.93e-06, "loss": 2.4604, "step": 2129 }, { "epoch": 21.3, "grad_norm": 57.88286209106445, "learning_rate": 2.9266666666666673e-06, "loss": 2.0758, "step": 2130 }, { "epoch": 21.31, "grad_norm": 82.13592529296875, "learning_rate": 2.9233333333333334e-06, "loss": 2.7135, "step": 2131 }, { "epoch": 21.32, "grad_norm": 34.85330581665039, "learning_rate": 2.92e-06, "loss": 3.2615, "step": 2132 }, { "epoch": 21.33, "grad_norm": 
47.26335144042969, "learning_rate": 2.916666666666667e-06, "loss": 2.7325, "step": 2133 }, { "epoch": 21.34, "grad_norm": 35.640167236328125, "learning_rate": 2.9133333333333335e-06, "loss": 2.9385, "step": 2134 }, { "epoch": 21.35, "grad_norm": 75.99909973144531, "learning_rate": 2.91e-06, "loss": 2.4327, "step": 2135 }, { "epoch": 21.36, "grad_norm": 70.28533935546875, "learning_rate": 2.906666666666667e-06, "loss": 2.7067, "step": 2136 }, { "epoch": 21.37, "grad_norm": 31.812549591064453, "learning_rate": 2.9033333333333336e-06, "loss": 2.6786, "step": 2137 }, { "epoch": 21.38, "grad_norm": 56.179935455322266, "learning_rate": 2.9e-06, "loss": 3.2672, "step": 2138 }, { "epoch": 21.39, "grad_norm": 38.506629943847656, "learning_rate": 2.896666666666667e-06, "loss": 3.3152, "step": 2139 }, { "epoch": 21.4, "grad_norm": 177.0730438232422, "learning_rate": 2.8933333333333337e-06, "loss": 5.7054, "step": 2140 }, { "epoch": 21.41, "grad_norm": 87.30292510986328, "learning_rate": 2.89e-06, "loss": 2.8806, "step": 2141 }, { "epoch": 21.42, "grad_norm": 67.49844360351562, "learning_rate": 2.8866666666666673e-06, "loss": 1.9144, "step": 2142 }, { "epoch": 21.43, "grad_norm": 109.7386245727539, "learning_rate": 2.8833333333333334e-06, "loss": 2.9191, "step": 2143 }, { "epoch": 21.44, "grad_norm": 40.99136734008789, "learning_rate": 2.88e-06, "loss": 2.9533, "step": 2144 }, { "epoch": 21.45, "grad_norm": 58.13808822631836, "learning_rate": 2.876666666666667e-06, "loss": 2.4964, "step": 2145 }, { "epoch": 21.46, "grad_norm": 67.48502349853516, "learning_rate": 2.8733333333333335e-06, "loss": 3.4624, "step": 2146 }, { "epoch": 21.47, "grad_norm": 100.36366271972656, "learning_rate": 2.87e-06, "loss": 3.0201, "step": 2147 }, { "epoch": 21.48, "grad_norm": 58.326541900634766, "learning_rate": 2.866666666666667e-06, "loss": 2.6598, "step": 2148 }, { "epoch": 21.49, "grad_norm": 56.077781677246094, "learning_rate": 2.8633333333333336e-06, "loss": 3.1796, "step": 2149 }, { 
"epoch": 21.5, "grad_norm": 52.33390426635742, "learning_rate": 2.86e-06, "loss": 3.2692, "step": 2150 }, { "epoch": 21.5, "eval_loss": 2.7387616634368896, "eval_map": 0.0051, "eval_map_50": 0.0113, "eval_map_75": 0.0041, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0421, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0025, "eval_map_medium": 0.0083, "eval_map_neckline": 0.0111, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1209, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0335, "eval_map_small": 0.0042, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0103, "eval_mar_10": 0.0264, "eval_mar_100": 0.0315, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.1653, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, 
"eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1635, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5172, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4443, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0355, "eval_mar_medium": 0.0454, "eval_mar_small": 0.0236, "eval_model_preparation_time": 0.0124, "eval_runtime": 19.587, "eval_samples_per_second": 5.105, "eval_steps_per_second": 1.276, "step": 2150 }, { "epoch": 21.51, "grad_norm": 45.03437423706055, "learning_rate": 2.856666666666667e-06, "loss": 2.5978, "step": 2151 }, { "epoch": 21.52, "grad_norm": 105.70342254638672, "learning_rate": 2.8533333333333337e-06, "loss": 2.8564, "step": 2152 }, { "epoch": 21.53, "grad_norm": 42.89319610595703, "learning_rate": 2.85e-06, "loss": 2.9394, "step": 2153 }, { "epoch": 21.54, "grad_norm": 95.83782196044922, "learning_rate": 2.8466666666666672e-06, "loss": 3.9525, "step": 2154 }, { "epoch": 21.55, "grad_norm": 32.82400894165039, "learning_rate": 2.8433333333333334e-06, "loss": 3.0865, "step": 2155 }, { "epoch": 21.56, "grad_norm": 78.4056396484375, "learning_rate": 2.84e-06, "loss": 2.6457, "step": 2156 }, { "epoch": 21.57, "grad_norm": 56.637367248535156, "learning_rate": 2.836666666666667e-06, "loss": 2.869, "step": 2157 }, { "epoch": 21.58, "grad_norm": 69.1451644897461, "learning_rate": 2.8333333333333335e-06, "loss": 2.6362, "step": 2158 }, { "epoch": 21.59, "grad_norm": 51.34495162963867, "learning_rate": 2.83e-06, "loss": 2.0176, "step": 2159 }, { "epoch": 21.6, "grad_norm": 40.77092742919922, 
"learning_rate": 2.826666666666667e-06, "loss": 2.868, "step": 2160 }, { "epoch": 21.61, "grad_norm": 65.77891540527344, "learning_rate": 2.8233333333333335e-06, "loss": 2.1488, "step": 2161 }, { "epoch": 21.62, "grad_norm": 60.52617645263672, "learning_rate": 2.82e-06, "loss": 2.9916, "step": 2162 }, { "epoch": 21.63, "grad_norm": 42.56246566772461, "learning_rate": 2.816666666666667e-06, "loss": 3.0662, "step": 2163 }, { "epoch": 21.64, "grad_norm": 58.95469284057617, "learning_rate": 2.8133333333333336e-06, "loss": 2.9211, "step": 2164 }, { "epoch": 21.65, "grad_norm": 42.23038864135742, "learning_rate": 2.8100000000000006e-06, "loss": 2.7081, "step": 2165 }, { "epoch": 21.66, "grad_norm": 43.532127380371094, "learning_rate": 2.806666666666667e-06, "loss": 3.5268, "step": 2166 }, { "epoch": 21.67, "grad_norm": 322.2372131347656, "learning_rate": 2.8033333333333333e-06, "loss": 2.7016, "step": 2167 }, { "epoch": 21.68, "grad_norm": 46.20387649536133, "learning_rate": 2.8000000000000003e-06, "loss": 2.2883, "step": 2168 }, { "epoch": 21.69, "grad_norm": 98.14495849609375, "learning_rate": 2.796666666666667e-06, "loss": 2.5589, "step": 2169 }, { "epoch": 21.7, "grad_norm": 36.433082580566406, "learning_rate": 2.7933333333333334e-06, "loss": 2.656, "step": 2170 }, { "epoch": 21.71, "grad_norm": 100.60790252685547, "learning_rate": 2.7900000000000004e-06, "loss": 2.7596, "step": 2171 }, { "epoch": 21.72, "grad_norm": 113.38987731933594, "learning_rate": 2.786666666666667e-06, "loss": 3.3613, "step": 2172 }, { "epoch": 21.73, "grad_norm": 38.891075134277344, "learning_rate": 2.7833333333333335e-06, "loss": 2.4867, "step": 2173 }, { "epoch": 21.74, "grad_norm": 37.96001052856445, "learning_rate": 2.7800000000000005e-06, "loss": 2.4072, "step": 2174 }, { "epoch": 21.75, "grad_norm": 66.59097290039062, "learning_rate": 2.776666666666667e-06, "loss": 3.2954, "step": 2175 }, { "epoch": 21.76, "grad_norm": 104.68189239501953, "learning_rate": 2.7733333333333336e-06, "loss": 
3.1751, "step": 2176 }, { "epoch": 21.77, "grad_norm": 65.4173812866211, "learning_rate": 2.7700000000000006e-06, "loss": 3.7105, "step": 2177 }, { "epoch": 21.78, "grad_norm": 42.146793365478516, "learning_rate": 2.766666666666667e-06, "loss": 2.1228, "step": 2178 }, { "epoch": 21.79, "grad_norm": 48.01837921142578, "learning_rate": 2.7633333333333333e-06, "loss": 2.6025, "step": 2179 }, { "epoch": 21.8, "grad_norm": 1466.8553466796875, "learning_rate": 2.7600000000000003e-06, "loss": 2.9878, "step": 2180 }, { "epoch": 21.81, "grad_norm": 43.59914016723633, "learning_rate": 2.756666666666667e-06, "loss": 3.2322, "step": 2181 }, { "epoch": 21.82, "grad_norm": 30.98531723022461, "learning_rate": 2.7533333333333334e-06, "loss": 3.4081, "step": 2182 }, { "epoch": 21.83, "grad_norm": 69.83747100830078, "learning_rate": 2.7500000000000004e-06, "loss": 2.78, "step": 2183 }, { "epoch": 21.84, "grad_norm": 97.20626068115234, "learning_rate": 2.746666666666667e-06, "loss": 2.816, "step": 2184 }, { "epoch": 21.85, "grad_norm": 1113.6451416015625, "learning_rate": 2.7433333333333335e-06, "loss": 3.573, "step": 2185 }, { "epoch": 21.86, "grad_norm": 95.1293716430664, "learning_rate": 2.7400000000000004e-06, "loss": 2.6233, "step": 2186 }, { "epoch": 21.87, "grad_norm": 94.77790069580078, "learning_rate": 2.736666666666667e-06, "loss": 3.2374, "step": 2187 }, { "epoch": 21.88, "grad_norm": 52.51283264160156, "learning_rate": 2.7333333333333336e-06, "loss": 2.3069, "step": 2188 }, { "epoch": 21.89, "grad_norm": 74.41666412353516, "learning_rate": 2.7300000000000005e-06, "loss": 2.2009, "step": 2189 }, { "epoch": 21.9, "grad_norm": 45.573211669921875, "learning_rate": 2.726666666666667e-06, "loss": 3.3502, "step": 2190 }, { "epoch": 21.91, "grad_norm": 81.46175384521484, "learning_rate": 2.7233333333333332e-06, "loss": 2.8589, "step": 2191 }, { "epoch": 21.92, "grad_norm": 110.280029296875, "learning_rate": 2.7200000000000002e-06, "loss": 2.5573, "step": 2192 }, { "epoch": 21.93, 
"grad_norm": 46.111976623535156, "learning_rate": 2.7166666666666668e-06, "loss": 2.2926, "step": 2193 }, { "epoch": 21.94, "grad_norm": 104.76593017578125, "learning_rate": 2.7133333333333333e-06, "loss": 2.5324, "step": 2194 }, { "epoch": 21.95, "grad_norm": 67.35022735595703, "learning_rate": 2.7100000000000003e-06, "loss": 2.7805, "step": 2195 }, { "epoch": 21.96, "grad_norm": 57.671356201171875, "learning_rate": 2.706666666666667e-06, "loss": 2.6811, "step": 2196 }, { "epoch": 21.97, "grad_norm": 97.4278793334961, "learning_rate": 2.7033333333333334e-06, "loss": 2.6205, "step": 2197 }, { "epoch": 21.98, "grad_norm": 50.16219711303711, "learning_rate": 2.7000000000000004e-06, "loss": 2.5428, "step": 2198 }, { "epoch": 21.99, "grad_norm": 40.15016555786133, "learning_rate": 2.696666666666667e-06, "loss": 2.6916, "step": 2199 }, { "epoch": 22.0, "grad_norm": 47.1495246887207, "learning_rate": 2.6933333333333335e-06, "loss": 2.7976, "step": 2200 }, { "epoch": 22.0, "eval_loss": 2.729414701461792, "eval_map": 0.0048, "eval_map_50": 0.0111, "eval_map_75": 0.0035, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0372, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0046, "eval_map_medium": 0.0082, "eval_map_neckline": 0.0119, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1153, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0318, "eval_map_small": 0.0037, 
"eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0105, "eval_mar_10": 0.0253, "eval_mar_100": 0.0308, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.1571, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1667, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.509, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4304, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0347, "eval_mar_medium": 0.0457, "eval_mar_small": 0.0237, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.4092, "eval_samples_per_second": 5.432, "eval_steps_per_second": 1.358, "step": 2200 }, { "epoch": 22.01, "grad_norm": 91.235595703125, "learning_rate": 2.6900000000000005e-06, "loss": 3.4588, "step": 2201 }, { "epoch": 22.02, "grad_norm": 90.49068450927734, "learning_rate": 2.686666666666667e-06, "loss": 2.9039, "step": 2202 }, { "epoch": 22.03, "grad_norm": 
55.804691314697266, "learning_rate": 2.683333333333333e-06, "loss": 3.0104, "step": 2203 }, { "epoch": 22.04, "grad_norm": 39.47795867919922, "learning_rate": 2.68e-06, "loss": 2.9055, "step": 2204 }, { "epoch": 22.05, "grad_norm": 45.74256134033203, "learning_rate": 2.6766666666666667e-06, "loss": 3.0781, "step": 2205 }, { "epoch": 22.06, "grad_norm": 47.799373626708984, "learning_rate": 2.6733333333333333e-06, "loss": 2.8637, "step": 2206 }, { "epoch": 22.07, "grad_norm": 66.68999481201172, "learning_rate": 2.6700000000000003e-06, "loss": 2.6271, "step": 2207 }, { "epoch": 22.08, "grad_norm": 108.48275756835938, "learning_rate": 2.666666666666667e-06, "loss": 3.3732, "step": 2208 }, { "epoch": 22.09, "grad_norm": 71.06889343261719, "learning_rate": 2.6633333333333334e-06, "loss": 2.3353, "step": 2209 }, { "epoch": 22.1, "grad_norm": 42.15519714355469, "learning_rate": 2.6600000000000004e-06, "loss": 2.0758, "step": 2210 }, { "epoch": 22.11, "grad_norm": 41.26643371582031, "learning_rate": 2.656666666666667e-06, "loss": 2.964, "step": 2211 }, { "epoch": 22.12, "grad_norm": 43.77790832519531, "learning_rate": 2.6533333333333335e-06, "loss": 2.2658, "step": 2212 }, { "epoch": 22.13, "grad_norm": 36.34267044067383, "learning_rate": 2.6500000000000005e-06, "loss": 2.2548, "step": 2213 }, { "epoch": 22.14, "grad_norm": 45.25441360473633, "learning_rate": 2.646666666666667e-06, "loss": 2.4908, "step": 2214 }, { "epoch": 22.15, "grad_norm": 25.253662109375, "learning_rate": 2.643333333333333e-06, "loss": 3.0671, "step": 2215 }, { "epoch": 22.16, "grad_norm": 35.60101318359375, "learning_rate": 2.64e-06, "loss": 2.6902, "step": 2216 }, { "epoch": 22.17, "grad_norm": 56.225303649902344, "learning_rate": 2.6366666666666667e-06, "loss": 3.4189, "step": 2217 }, { "epoch": 22.18, "grad_norm": 72.75936889648438, "learning_rate": 2.6333333333333332e-06, "loss": 2.9736, "step": 2218 }, { "epoch": 22.19, "grad_norm": 64.94235229492188, "learning_rate": 2.6300000000000002e-06, 
"loss": 2.0412, "step": 2219 }, { "epoch": 22.2, "grad_norm": 46.102054595947266, "learning_rate": 2.6266666666666668e-06, "loss": 3.042, "step": 2220 }, { "epoch": 22.21, "grad_norm": 48.10826110839844, "learning_rate": 2.6233333333333333e-06, "loss": 2.5528, "step": 2221 }, { "epoch": 22.22, "grad_norm": 79.49690246582031, "learning_rate": 2.6200000000000003e-06, "loss": 2.9626, "step": 2222 }, { "epoch": 22.23, "grad_norm": 122.20927429199219, "learning_rate": 2.616666666666667e-06, "loss": 2.9199, "step": 2223 }, { "epoch": 22.24, "grad_norm": 45.095298767089844, "learning_rate": 2.6133333333333334e-06, "loss": 2.7942, "step": 2224 }, { "epoch": 22.25, "grad_norm": 31.37150001525879, "learning_rate": 2.6100000000000004e-06, "loss": 3.3608, "step": 2225 }, { "epoch": 22.26, "grad_norm": 69.27412414550781, "learning_rate": 2.606666666666667e-06, "loss": 2.4276, "step": 2226 }, { "epoch": 22.27, "grad_norm": 46.369781494140625, "learning_rate": 2.603333333333334e-06, "loss": 3.6398, "step": 2227 }, { "epoch": 22.28, "grad_norm": 64.9514389038086, "learning_rate": 2.6e-06, "loss": 2.6221, "step": 2228 }, { "epoch": 22.29, "grad_norm": 184.8511962890625, "learning_rate": 2.5966666666666667e-06, "loss": 2.2444, "step": 2229 }, { "epoch": 22.3, "grad_norm": 115.56673431396484, "learning_rate": 2.5933333333333336e-06, "loss": 3.1279, "step": 2230 }, { "epoch": 22.31, "grad_norm": 59.1316032409668, "learning_rate": 2.59e-06, "loss": 2.7955, "step": 2231 }, { "epoch": 22.32, "grad_norm": 51.769046783447266, "learning_rate": 2.5866666666666667e-06, "loss": 5.9735, "step": 2232 }, { "epoch": 22.33, "grad_norm": 157.15769958496094, "learning_rate": 2.5833333333333337e-06, "loss": 2.6447, "step": 2233 }, { "epoch": 22.34, "grad_norm": 48.246273040771484, "learning_rate": 2.5800000000000003e-06, "loss": 3.1231, "step": 2234 }, { "epoch": 22.35, "grad_norm": 76.64561462402344, "learning_rate": 2.576666666666667e-06, "loss": 3.4057, "step": 2235 }, { "epoch": 22.36, 
"grad_norm": 66.10486602783203, "learning_rate": 2.573333333333334e-06, "loss": 2.9828, "step": 2236 }, { "epoch": 22.37, "grad_norm": 62.938270568847656, "learning_rate": 2.5700000000000004e-06, "loss": 2.5975, "step": 2237 }, { "epoch": 22.38, "grad_norm": 36.62080001831055, "learning_rate": 2.566666666666667e-06, "loss": 2.5347, "step": 2238 }, { "epoch": 22.39, "grad_norm": 55.11225891113281, "learning_rate": 2.563333333333334e-06, "loss": 2.5246, "step": 2239 }, { "epoch": 22.4, "grad_norm": 141.8232879638672, "learning_rate": 2.56e-06, "loss": 2.0787, "step": 2240 }, { "epoch": 22.41, "grad_norm": 52.32972717285156, "learning_rate": 2.5566666666666666e-06, "loss": 2.3084, "step": 2241 }, { "epoch": 22.42, "grad_norm": 54.72819519042969, "learning_rate": 2.5533333333333336e-06, "loss": 3.0068, "step": 2242 }, { "epoch": 22.43, "grad_norm": 47.31985092163086, "learning_rate": 2.55e-06, "loss": 3.0471, "step": 2243 }, { "epoch": 22.44, "grad_norm": 41.37513732910156, "learning_rate": 2.5466666666666667e-06, "loss": 2.9982, "step": 2244 }, { "epoch": 22.45, "grad_norm": 44.700225830078125, "learning_rate": 2.5433333333333337e-06, "loss": 2.6291, "step": 2245 }, { "epoch": 22.46, "grad_norm": 44.684722900390625, "learning_rate": 2.5400000000000002e-06, "loss": 5.9219, "step": 2246 }, { "epoch": 22.47, "grad_norm": 179.7180938720703, "learning_rate": 2.536666666666667e-06, "loss": 3.2514, "step": 2247 }, { "epoch": 22.48, "grad_norm": 69.68059539794922, "learning_rate": 2.5333333333333338e-06, "loss": 3.1572, "step": 2248 }, { "epoch": 22.49, "grad_norm": 146.6685333251953, "learning_rate": 2.5300000000000003e-06, "loss": 2.8542, "step": 2249 }, { "epoch": 22.5, "grad_norm": 58.0498046875, "learning_rate": 2.526666666666667e-06, "loss": 2.3505, "step": 2250 }, { "epoch": 22.5, "eval_loss": 2.7265453338623047, "eval_map": 0.005, "eval_map_50": 0.0114, "eval_map_75": 0.0042, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 
0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0377, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0023, "eval_map_medium": 0.008, "eval_map_neckline": 0.0135, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1208, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0331, "eval_map_small": 0.0044, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.011, "eval_mar_10": 0.0275, "eval_mar_100": 0.0329, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.198, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.173, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5209, 
"eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4565, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0391, "eval_mar_medium": 0.048, "eval_mar_small": 0.0236, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.3823, "eval_samples_per_second": 5.44, "eval_steps_per_second": 1.36, "step": 2250 }, { "epoch": 22.51, "grad_norm": 88.6357192993164, "learning_rate": 2.523333333333334e-06, "loss": 2.3034, "step": 2251 }, { "epoch": 22.52, "grad_norm": 38.43184280395508, "learning_rate": 2.52e-06, "loss": 2.3077, "step": 2252 }, { "epoch": 22.53, "grad_norm": 107.0344009399414, "learning_rate": 2.5166666666666666e-06, "loss": 2.684, "step": 2253 }, { "epoch": 22.54, "grad_norm": 33.64009094238281, "learning_rate": 2.5133333333333336e-06, "loss": 2.5989, "step": 2254 }, { "epoch": 22.55, "grad_norm": 162.41427612304688, "learning_rate": 2.51e-06, "loss": 2.4522, "step": 2255 }, { "epoch": 22.56, "grad_norm": 173.39036560058594, "learning_rate": 2.5066666666666667e-06, "loss": 1.8958, "step": 2256 }, { "epoch": 22.57, "grad_norm": 54.80681228637695, "learning_rate": 2.5033333333333336e-06, "loss": 3.1171, "step": 2257 }, { "epoch": 22.58, "grad_norm": 33.85398483276367, "learning_rate": 2.5e-06, "loss": 3.1644, "step": 2258 }, { "epoch": 22.59, "grad_norm": 31.005512237548828, "learning_rate": 2.4966666666666668e-06, "loss": 2.4893, "step": 2259 }, { "epoch": 22.6, "grad_norm": 33.29765319824219, "learning_rate": 2.4933333333333333e-06, "loss": 1.9741, "step": 2260 }, { "epoch": 22.61, "grad_norm": 95.3650894165039, "learning_rate": 2.4900000000000003e-06, "loss": 2.8161, "step": 2261 }, { "epoch": 22.62, "grad_norm": 101.23648834228516, "learning_rate": 2.486666666666667e-06, "loss": 2.5472, "step": 2262 }, { "epoch": 
22.63, "grad_norm": 50.40163040161133, "learning_rate": 2.4833333333333334e-06, "loss": 3.4847, "step": 2263 }, { "epoch": 22.64, "grad_norm": 55.1347541809082, "learning_rate": 2.4800000000000004e-06, "loss": 3.3575, "step": 2264 }, { "epoch": 22.65, "grad_norm": 84.09256744384766, "learning_rate": 2.476666666666667e-06, "loss": 2.1848, "step": 2265 }, { "epoch": 22.66, "grad_norm": 145.4600372314453, "learning_rate": 2.4733333333333335e-06, "loss": 3.2377, "step": 2266 }, { "epoch": 22.67, "grad_norm": 54.3372688293457, "learning_rate": 2.47e-06, "loss": 2.2097, "step": 2267 }, { "epoch": 22.68, "grad_norm": 52.598487854003906, "learning_rate": 2.466666666666667e-06, "loss": 2.9413, "step": 2268 }, { "epoch": 22.69, "grad_norm": 112.55009460449219, "learning_rate": 2.4633333333333336e-06, "loss": 2.6871, "step": 2269 }, { "epoch": 22.7, "grad_norm": 86.9411392211914, "learning_rate": 2.46e-06, "loss": 2.9302, "step": 2270 }, { "epoch": 22.71, "grad_norm": 79.46871185302734, "learning_rate": 2.4566666666666667e-06, "loss": 2.055, "step": 2271 }, { "epoch": 22.72, "grad_norm": 47.04165267944336, "learning_rate": 2.4533333333333333e-06, "loss": 3.1274, "step": 2272 }, { "epoch": 22.73, "grad_norm": 46.33061599731445, "learning_rate": 2.4500000000000003e-06, "loss": 2.4435, "step": 2273 }, { "epoch": 22.74, "grad_norm": 47.34523010253906, "learning_rate": 2.446666666666667e-06, "loss": 3.3118, "step": 2274 }, { "epoch": 22.75, "grad_norm": 59.182518005371094, "learning_rate": 2.443333333333334e-06, "loss": 2.396, "step": 2275 }, { "epoch": 22.76, "grad_norm": 45.104549407958984, "learning_rate": 2.4400000000000004e-06, "loss": 2.62, "step": 2276 }, { "epoch": 22.77, "grad_norm": 37.560821533203125, "learning_rate": 2.436666666666667e-06, "loss": 2.2767, "step": 2277 }, { "epoch": 22.78, "grad_norm": 65.04462432861328, "learning_rate": 2.4333333333333335e-06, "loss": 2.3659, "step": 2278 }, { "epoch": 22.79, "grad_norm": 134.8483123779297, "learning_rate": 2.43e-06, 
"loss": 3.221, "step": 2279 }, { "epoch": 22.8, "grad_norm": 46.68364715576172, "learning_rate": 2.426666666666667e-06, "loss": 3.1487, "step": 2280 }, { "epoch": 22.81, "grad_norm": 62.46312713623047, "learning_rate": 2.4233333333333336e-06, "loss": 2.4761, "step": 2281 }, { "epoch": 22.82, "grad_norm": 37.21040344238281, "learning_rate": 2.42e-06, "loss": 3.1769, "step": 2282 }, { "epoch": 22.83, "grad_norm": 104.02019500732422, "learning_rate": 2.4166666666666667e-06, "loss": 2.661, "step": 2283 }, { "epoch": 22.84, "grad_norm": 160.33958435058594, "learning_rate": 2.4133333333333337e-06, "loss": 3.3463, "step": 2284 }, { "epoch": 22.85, "grad_norm": 85.98295593261719, "learning_rate": 2.4100000000000002e-06, "loss": 3.1481, "step": 2285 }, { "epoch": 22.86, "grad_norm": 34.53330612182617, "learning_rate": 2.4066666666666668e-06, "loss": 2.4766, "step": 2286 }, { "epoch": 22.87, "grad_norm": 52.21182632446289, "learning_rate": 2.4033333333333338e-06, "loss": 3.1166, "step": 2287 }, { "epoch": 22.88, "grad_norm": 44.43632888793945, "learning_rate": 2.4000000000000003e-06, "loss": 3.3866, "step": 2288 }, { "epoch": 22.89, "grad_norm": 111.66146087646484, "learning_rate": 2.396666666666667e-06, "loss": 2.9583, "step": 2289 }, { "epoch": 22.9, "grad_norm": 160.676025390625, "learning_rate": 2.3933333333333334e-06, "loss": 2.4351, "step": 2290 }, { "epoch": 22.91, "grad_norm": 43.23748016357422, "learning_rate": 2.39e-06, "loss": 3.282, "step": 2291 }, { "epoch": 22.92, "grad_norm": 45.03369140625, "learning_rate": 2.386666666666667e-06, "loss": 3.4533, "step": 2292 }, { "epoch": 22.93, "grad_norm": 29.434919357299805, "learning_rate": 2.3833333333333335e-06, "loss": 2.912, "step": 2293 }, { "epoch": 22.94, "grad_norm": 53.89950942993164, "learning_rate": 2.38e-06, "loss": 3.0489, "step": 2294 }, { "epoch": 22.95, "grad_norm": 53.10448455810547, "learning_rate": 2.3766666666666666e-06, "loss": 2.398, "step": 2295 }, { "epoch": 22.96, "grad_norm": 57.59716796875, 
"learning_rate": 2.3733333333333336e-06, "loss": 3.0224, "step": 2296 }, { "epoch": 22.97, "grad_norm": 45.701175689697266, "learning_rate": 2.37e-06, "loss": 2.7215, "step": 2297 }, { "epoch": 22.98, "grad_norm": 50.121299743652344, "learning_rate": 2.3666666666666667e-06, "loss": 2.5751, "step": 2298 }, { "epoch": 22.99, "grad_norm": 41.140682220458984, "learning_rate": 2.3633333333333337e-06, "loss": 3.8567, "step": 2299 }, { "epoch": 23.0, "grad_norm": 62.09841537475586, "learning_rate": 2.3600000000000003e-06, "loss": 2.4853, "step": 2300 }, { "epoch": 23.0, "eval_loss": 2.712003469467163, "eval_map": 0.0052, "eval_map_50": 0.0119, "eval_map_75": 0.0041, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0453, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0031, "eval_map_medium": 0.008, "eval_map_neckline": 0.0124, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1233, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0341, "eval_map_small": 0.0042, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0109, "eval_mar_10": 0.0285, "eval_mar_100": 0.0339, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, 
"eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.2306, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1714, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5194, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4704, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0432, "eval_mar_medium": 0.0476, "eval_mar_small": 0.0233, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.5096, "eval_samples_per_second": 5.403, "eval_steps_per_second": 1.351, "step": 2300 }, { "epoch": 23.01, "grad_norm": 37.085304260253906, "learning_rate": 2.356666666666667e-06, "loss": 2.8071, "step": 2301 }, { "epoch": 23.02, "grad_norm": 94.46769714355469, "learning_rate": 2.3533333333333334e-06, "loss": 2.2907, "step": 2302 }, { "epoch": 23.03, "grad_norm": 71.79440307617188, "learning_rate": 2.35e-06, "loss": 2.5827, "step": 2303 }, { "epoch": 23.04, "grad_norm": 99.4516830444336, "learning_rate": 2.346666666666667e-06, "loss": 2.2229, "step": 2304 }, { "epoch": 23.05, "grad_norm": 63.833770751953125, "learning_rate": 2.3433333333333335e-06, "loss": 3.3139, "step": 2305 }, { "epoch": 23.06, "grad_norm": 291.5792236328125, "learning_rate": 
2.3400000000000005e-06, "loss": 3.117, "step": 2306 }, { "epoch": 23.07, "grad_norm": 55.45703887939453, "learning_rate": 2.3366666666666666e-06, "loss": 2.7574, "step": 2307 }, { "epoch": 23.08, "grad_norm": 63.998435974121094, "learning_rate": 2.3333333333333336e-06, "loss": 2.3677, "step": 2308 }, { "epoch": 23.09, "grad_norm": 69.12519836425781, "learning_rate": 2.33e-06, "loss": 2.9876, "step": 2309 }, { "epoch": 23.1, "grad_norm": 45.224525451660156, "learning_rate": 2.3266666666666667e-06, "loss": 2.421, "step": 2310 }, { "epoch": 23.11, "grad_norm": 64.44141387939453, "learning_rate": 2.3233333333333337e-06, "loss": 2.3363, "step": 2311 }, { "epoch": 23.12, "grad_norm": 35.71406936645508, "learning_rate": 2.3200000000000002e-06, "loss": 2.82, "step": 2312 }, { "epoch": 23.13, "grad_norm": 32.570491790771484, "learning_rate": 2.316666666666667e-06, "loss": 3.0841, "step": 2313 }, { "epoch": 23.14, "grad_norm": 44.00971603393555, "learning_rate": 2.3133333333333333e-06, "loss": 2.4973, "step": 2314 }, { "epoch": 23.15, "grad_norm": 58.06011962890625, "learning_rate": 2.3100000000000003e-06, "loss": 2.8981, "step": 2315 }, { "epoch": 23.16, "grad_norm": 78.43318176269531, "learning_rate": 2.306666666666667e-06, "loss": 2.9508, "step": 2316 }, { "epoch": 23.17, "grad_norm": 85.24803161621094, "learning_rate": 2.3033333333333334e-06, "loss": 2.61, "step": 2317 }, { "epoch": 23.18, "grad_norm": 38.146732330322266, "learning_rate": 2.3000000000000004e-06, "loss": 2.6894, "step": 2318 }, { "epoch": 23.19, "grad_norm": 42.27162170410156, "learning_rate": 2.2966666666666666e-06, "loss": 2.7217, "step": 2319 }, { "epoch": 23.2, "grad_norm": 66.19497680664062, "learning_rate": 2.2933333333333335e-06, "loss": 2.857, "step": 2320 }, { "epoch": 23.21, "grad_norm": 57.26690673828125, "learning_rate": 2.29e-06, "loss": 2.3585, "step": 2321 }, { "epoch": 23.22, "grad_norm": 47.99272537231445, "learning_rate": 2.2866666666666667e-06, "loss": 3.5349, "step": 2322 }, { "epoch": 
23.23, "grad_norm": 56.880348205566406, "learning_rate": 2.2833333333333336e-06, "loss": 2.6382, "step": 2323 }, { "epoch": 23.24, "grad_norm": 202.7523193359375, "learning_rate": 2.28e-06, "loss": 2.8173, "step": 2324 }, { "epoch": 23.25, "grad_norm": 29.1833553314209, "learning_rate": 2.2766666666666668e-06, "loss": 3.0744, "step": 2325 }, { "epoch": 23.26, "grad_norm": 46.105995178222656, "learning_rate": 2.2733333333333333e-06, "loss": 2.7737, "step": 2326 }, { "epoch": 23.27, "grad_norm": 71.60438537597656, "learning_rate": 2.2700000000000003e-06, "loss": 3.2407, "step": 2327 }, { "epoch": 23.28, "grad_norm": 177.1625518798828, "learning_rate": 2.266666666666667e-06, "loss": 2.9245, "step": 2328 }, { "epoch": 23.29, "grad_norm": 35.43979263305664, "learning_rate": 2.2633333333333334e-06, "loss": 3.001, "step": 2329 }, { "epoch": 23.3, "grad_norm": 47.84958267211914, "learning_rate": 2.2600000000000004e-06, "loss": 2.715, "step": 2330 }, { "epoch": 23.31, "grad_norm": 108.10082244873047, "learning_rate": 2.2566666666666665e-06, "loss": 2.1345, "step": 2331 }, { "epoch": 23.32, "grad_norm": 44.055206298828125, "learning_rate": 2.2533333333333335e-06, "loss": 2.7134, "step": 2332 }, { "epoch": 23.33, "grad_norm": 42.34551239013672, "learning_rate": 2.25e-06, "loss": 3.4766, "step": 2333 }, { "epoch": 23.34, "grad_norm": 63.96914291381836, "learning_rate": 2.2466666666666666e-06, "loss": 2.9767, "step": 2334 }, { "epoch": 23.35, "grad_norm": 78.3906478881836, "learning_rate": 2.2433333333333336e-06, "loss": 3.641, "step": 2335 }, { "epoch": 23.36, "grad_norm": 100.85836029052734, "learning_rate": 2.24e-06, "loss": 2.4933, "step": 2336 }, { "epoch": 23.37, "grad_norm": 53.467193603515625, "learning_rate": 2.236666666666667e-06, "loss": 3.235, "step": 2337 }, { "epoch": 23.38, "grad_norm": 72.84982299804688, "learning_rate": 2.2333333333333333e-06, "loss": 3.3051, "step": 2338 }, { "epoch": 23.39, "grad_norm": 110.12896728515625, "learning_rate": 
2.2300000000000002e-06, "loss": 2.6934, "step": 2339 }, { "epoch": 23.4, "grad_norm": 45.99521255493164, "learning_rate": 2.226666666666667e-06, "loss": 2.3771, "step": 2340 }, { "epoch": 23.41, "grad_norm": 47.91047286987305, "learning_rate": 2.2233333333333334e-06, "loss": 2.693, "step": 2341 }, { "epoch": 23.42, "grad_norm": 106.23905944824219, "learning_rate": 2.2200000000000003e-06, "loss": 2.6929, "step": 2342 }, { "epoch": 23.43, "grad_norm": 96.97220611572266, "learning_rate": 2.216666666666667e-06, "loss": 3.158, "step": 2343 }, { "epoch": 23.44, "grad_norm": 51.159088134765625, "learning_rate": 2.2133333333333335e-06, "loss": 2.482, "step": 2344 }, { "epoch": 23.45, "grad_norm": 27.047563552856445, "learning_rate": 2.21e-06, "loss": 3.0094, "step": 2345 }, { "epoch": 23.46, "grad_norm": 45.752593994140625, "learning_rate": 2.206666666666667e-06, "loss": 3.491, "step": 2346 }, { "epoch": 23.47, "grad_norm": 66.07198333740234, "learning_rate": 2.2033333333333336e-06, "loss": 3.0127, "step": 2347 }, { "epoch": 23.48, "grad_norm": 45.4268684387207, "learning_rate": 2.2e-06, "loss": 2.8386, "step": 2348 }, { "epoch": 23.49, "grad_norm": 74.97437286376953, "learning_rate": 2.196666666666667e-06, "loss": 3.2284, "step": 2349 }, { "epoch": 23.5, "grad_norm": 44.574554443359375, "learning_rate": 2.1933333333333332e-06, "loss": 2.3593, "step": 2350 }, { "epoch": 23.5, "eval_loss": 2.7257940769195557, "eval_map": 0.0055, "eval_map_50": 0.0128, "eval_map_75": 0.004, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0549, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 
0.0, "eval_map_large": 0.0034, "eval_map_medium": 0.0072, "eval_map_neckline": 0.0123, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1235, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.033, "eval_map_small": 0.0049, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0116, "eval_mar_10": 0.0283, "eval_mar_100": 0.0329, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.2469, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1667, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.4985, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4357, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0406, "eval_mar_medium": 0.0455, "eval_mar_small": 0.0225, 
"eval_model_preparation_time": 0.0124, "eval_runtime": 18.3891, "eval_samples_per_second": 5.438, "eval_steps_per_second": 1.359, "step": 2350 }, { "epoch": 23.51, "grad_norm": 24.01529312133789, "learning_rate": 2.19e-06, "loss": 3.0192, "step": 2351 }, { "epoch": 23.52, "grad_norm": 57.14105987548828, "learning_rate": 2.1866666666666668e-06, "loss": 3.1441, "step": 2352 }, { "epoch": 23.53, "grad_norm": 41.26996994018555, "learning_rate": 2.1833333333333333e-06, "loss": 3.0861, "step": 2353 }, { "epoch": 23.54, "grad_norm": 64.84845733642578, "learning_rate": 2.1800000000000003e-06, "loss": 2.8055, "step": 2354 }, { "epoch": 23.55, "grad_norm": 159.4359130859375, "learning_rate": 2.176666666666667e-06, "loss": 3.0223, "step": 2355 }, { "epoch": 23.56, "grad_norm": 113.4496078491211, "learning_rate": 2.1733333333333334e-06, "loss": 2.7565, "step": 2356 }, { "epoch": 23.57, "grad_norm": 73.5274658203125, "learning_rate": 2.17e-06, "loss": 2.4105, "step": 2357 }, { "epoch": 23.58, "grad_norm": 78.98088073730469, "learning_rate": 2.166666666666667e-06, "loss": 2.2693, "step": 2358 }, { "epoch": 23.59, "grad_norm": 116.04368591308594, "learning_rate": 2.1633333333333335e-06, "loss": 2.4374, "step": 2359 }, { "epoch": 23.6, "grad_norm": 250.24295043945312, "learning_rate": 2.16e-06, "loss": 3.1386, "step": 2360 }, { "epoch": 23.61, "grad_norm": 69.31257629394531, "learning_rate": 2.156666666666667e-06, "loss": 2.584, "step": 2361 }, { "epoch": 23.62, "grad_norm": 31.67272186279297, "learning_rate": 2.153333333333333e-06, "loss": 2.3064, "step": 2362 }, { "epoch": 23.63, "grad_norm": 32.99478530883789, "learning_rate": 2.15e-06, "loss": 3.073, "step": 2363 }, { "epoch": 23.64, "grad_norm": 50.62653732299805, "learning_rate": 2.1466666666666667e-06, "loss": 1.8384, "step": 2364 }, { "epoch": 23.65, "grad_norm": 49.41575241088867, "learning_rate": 2.1433333333333333e-06, "loss": 2.3936, "step": 2365 }, { "epoch": 23.66, "grad_norm": 31.354595184326172, "learning_rate": 
2.1400000000000003e-06, "loss": 2.708, "step": 2366 }, { "epoch": 23.67, "grad_norm": 101.32943725585938, "learning_rate": 2.136666666666667e-06, "loss": 2.8247, "step": 2367 }, { "epoch": 23.68, "grad_norm": 31.852384567260742, "learning_rate": 2.133333333333334e-06, "loss": 2.6756, "step": 2368 }, { "epoch": 23.69, "grad_norm": 57.42645263671875, "learning_rate": 2.13e-06, "loss": 3.5337, "step": 2369 }, { "epoch": 23.7, "grad_norm": 93.0571060180664, "learning_rate": 2.126666666666667e-06, "loss": 2.5285, "step": 2370 }, { "epoch": 23.71, "grad_norm": 117.3485107421875, "learning_rate": 2.1233333333333335e-06, "loss": 2.6337, "step": 2371 }, { "epoch": 23.72, "grad_norm": 52.74510955810547, "learning_rate": 2.12e-06, "loss": 3.4072, "step": 2372 }, { "epoch": 23.73, "grad_norm": 46.131103515625, "learning_rate": 2.116666666666667e-06, "loss": 3.0345, "step": 2373 }, { "epoch": 23.74, "grad_norm": 62.93221664428711, "learning_rate": 2.1133333333333336e-06, "loss": 3.0433, "step": 2374 }, { "epoch": 23.75, "grad_norm": 44.66250991821289, "learning_rate": 2.11e-06, "loss": 3.8913, "step": 2375 }, { "epoch": 23.76, "grad_norm": 43.50780487060547, "learning_rate": 2.1066666666666667e-06, "loss": 3.3237, "step": 2376 }, { "epoch": 23.77, "grad_norm": 200.7189178466797, "learning_rate": 2.1033333333333337e-06, "loss": 2.4217, "step": 2377 }, { "epoch": 23.78, "grad_norm": 34.89189910888672, "learning_rate": 2.1000000000000002e-06, "loss": 5.6342, "step": 2378 }, { "epoch": 23.79, "grad_norm": 47.31129837036133, "learning_rate": 2.0966666666666668e-06, "loss": 2.8052, "step": 2379 }, { "epoch": 23.8, "grad_norm": 41.145626068115234, "learning_rate": 2.0933333333333338e-06, "loss": 2.4999, "step": 2380 }, { "epoch": 23.81, "grad_norm": 60.64415740966797, "learning_rate": 2.09e-06, "loss": 3.0653, "step": 2381 }, { "epoch": 23.82, "grad_norm": 125.75601959228516, "learning_rate": 2.086666666666667e-06, "loss": 2.5236, "step": 2382 }, { "epoch": 23.83, "grad_norm": 
72.93500518798828, "learning_rate": 2.0833333333333334e-06, "loss": 2.5406, "step": 2383 }, { "epoch": 23.84, "grad_norm": 42.148136138916016, "learning_rate": 2.08e-06, "loss": 2.806, "step": 2384 }, { "epoch": 23.85, "grad_norm": 197.78758239746094, "learning_rate": 2.076666666666667e-06, "loss": 2.6796, "step": 2385 }, { "epoch": 23.86, "grad_norm": 304.1393737792969, "learning_rate": 2.0733333333333335e-06, "loss": 2.2526, "step": 2386 }, { "epoch": 23.87, "grad_norm": 502.1044006347656, "learning_rate": 2.07e-06, "loss": 2.5433, "step": 2387 }, { "epoch": 23.88, "grad_norm": 39.63005828857422, "learning_rate": 2.0666666666666666e-06, "loss": 2.553, "step": 2388 }, { "epoch": 23.89, "grad_norm": 35.65553283691406, "learning_rate": 2.0633333333333336e-06, "loss": 2.6069, "step": 2389 }, { "epoch": 23.9, "grad_norm": 92.74644470214844, "learning_rate": 2.06e-06, "loss": 2.9982, "step": 2390 }, { "epoch": 23.91, "grad_norm": 80.13140869140625, "learning_rate": 2.0566666666666667e-06, "loss": 2.9432, "step": 2391 }, { "epoch": 23.92, "grad_norm": 65.80624389648438, "learning_rate": 2.0533333333333337e-06, "loss": 3.2555, "step": 2392 }, { "epoch": 23.93, "grad_norm": 71.08830261230469, "learning_rate": 2.05e-06, "loss": 2.297, "step": 2393 }, { "epoch": 23.94, "grad_norm": 73.50988006591797, "learning_rate": 2.046666666666667e-06, "loss": 2.8018, "step": 2394 }, { "epoch": 23.95, "grad_norm": 37.746212005615234, "learning_rate": 2.0433333333333334e-06, "loss": 2.9063, "step": 2395 }, { "epoch": 23.96, "grad_norm": 41.37727737426758, "learning_rate": 2.04e-06, "loss": 3.1417, "step": 2396 }, { "epoch": 23.97, "grad_norm": 86.76824951171875, "learning_rate": 2.036666666666667e-06, "loss": 2.6207, "step": 2397 }, { "epoch": 23.98, "grad_norm": 54.253543853759766, "learning_rate": 2.0333333333333335e-06, "loss": 2.7943, "step": 2398 }, { "epoch": 23.99, "grad_norm": 71.86077117919922, "learning_rate": 2.0300000000000005e-06, "loss": 5.7117, "step": 2399 }, { "epoch": 
24.0, "grad_norm": 141.74017333984375, "learning_rate": 2.0266666666666666e-06, "loss": 2.4246, "step": 2400 }, { "epoch": 24.0, "eval_loss": 2.7011401653289795, "eval_map": 0.0054, "eval_map_50": 0.0125, "eval_map_75": 0.0041, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0476, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0028, "eval_map_medium": 0.0081, "eval_map_neckline": 0.0125, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1268, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0328, "eval_map_small": 0.0044, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0112, "eval_mar_10": 0.0297, "eval_mar_100": 0.0346, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.2592, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, 
"eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1698, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5269, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4609, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0412, "eval_mar_medium": 0.0468, "eval_mar_small": 0.0245, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.8634, "eval_samples_per_second": 5.301, "eval_steps_per_second": 1.325, "step": 2400 }, { "epoch": 24.01, "grad_norm": 133.74630737304688, "learning_rate": 2.0233333333333336e-06, "loss": 2.1244, "step": 2401 }, { "epoch": 24.02, "grad_norm": 48.11177062988281, "learning_rate": 2.02e-06, "loss": 3.3874, "step": 2402 }, { "epoch": 24.03, "grad_norm": 126.05386352539062, "learning_rate": 2.0166666666666667e-06, "loss": 2.8826, "step": 2403 }, { "epoch": 24.04, "grad_norm": 34.50734329223633, "learning_rate": 2.0133333333333337e-06, "loss": 1.7256, "step": 2404 }, { "epoch": 24.05, "grad_norm": 75.16394805908203, "learning_rate": 2.0100000000000002e-06, "loss": 2.5956, "step": 2405 }, { "epoch": 24.06, "grad_norm": 49.22208023071289, "learning_rate": 2.006666666666667e-06, "loss": 2.8825, "step": 2406 }, { "epoch": 24.07, "grad_norm": 35.40898513793945, "learning_rate": 2.0033333333333334e-06, "loss": 5.8517, "step": 2407 }, { "epoch": 24.08, "grad_norm": 83.48208618164062, "learning_rate": 2.0000000000000003e-06, "loss": 2.8941, "step": 2408 }, { "epoch": 24.09, "grad_norm": 67.0877685546875, "learning_rate": 1.996666666666667e-06, "loss": 3.5376, "step": 2409 }, { "epoch": 24.1, "grad_norm": 
66.10240173339844, "learning_rate": 1.9933333333333334e-06, "loss": 2.5564, "step": 2410 }, { "epoch": 24.11, "grad_norm": 38.96535110473633, "learning_rate": 1.9900000000000004e-06, "loss": 2.64, "step": 2411 }, { "epoch": 24.12, "grad_norm": 38.76250076293945, "learning_rate": 1.9866666666666666e-06, "loss": 3.0638, "step": 2412 }, { "epoch": 24.13, "grad_norm": 126.35408782958984, "learning_rate": 1.9833333333333335e-06, "loss": 3.1807, "step": 2413 }, { "epoch": 24.14, "grad_norm": 322.1502990722656, "learning_rate": 1.98e-06, "loss": 2.9744, "step": 2414 }, { "epoch": 24.15, "grad_norm": 42.04079818725586, "learning_rate": 1.9766666666666667e-06, "loss": 3.155, "step": 2415 }, { "epoch": 24.16, "grad_norm": 49.69563293457031, "learning_rate": 1.9733333333333336e-06, "loss": 2.2539, "step": 2416 }, { "epoch": 24.17, "grad_norm": 25.023574829101562, "learning_rate": 1.97e-06, "loss": 2.9386, "step": 2417 }, { "epoch": 24.18, "grad_norm": 49.90513610839844, "learning_rate": 1.9666666666666668e-06, "loss": 2.6345, "step": 2418 }, { "epoch": 24.19, "grad_norm": 83.28727722167969, "learning_rate": 1.9633333333333333e-06, "loss": 2.1277, "step": 2419 }, { "epoch": 24.2, "grad_norm": 58.10289764404297, "learning_rate": 1.9600000000000003e-06, "loss": 2.8853, "step": 2420 }, { "epoch": 24.21, "grad_norm": 52.828453063964844, "learning_rate": 1.956666666666667e-06, "loss": 2.5244, "step": 2421 }, { "epoch": 24.22, "grad_norm": 79.8495101928711, "learning_rate": 1.9533333333333334e-06, "loss": 2.6046, "step": 2422 }, { "epoch": 24.23, "grad_norm": 55.376163482666016, "learning_rate": 1.9500000000000004e-06, "loss": 2.7852, "step": 2423 }, { "epoch": 24.24, "grad_norm": 81.81238555908203, "learning_rate": 1.9466666666666665e-06, "loss": 2.2438, "step": 2424 }, { "epoch": 24.25, "grad_norm": 55.77919006347656, "learning_rate": 1.9433333333333335e-06, "loss": 3.3793, "step": 2425 }, { "epoch": 24.26, "grad_norm": 60.355690002441406, "learning_rate": 1.94e-06, "loss": 
2.7933, "step": 2426 }, { "epoch": 24.27, "grad_norm": 83.03820037841797, "learning_rate": 1.9366666666666666e-06, "loss": 2.7483, "step": 2427 }, { "epoch": 24.28, "grad_norm": 93.17903900146484, "learning_rate": 1.9333333333333336e-06, "loss": 2.8618, "step": 2428 }, { "epoch": 24.29, "grad_norm": 72.9843521118164, "learning_rate": 1.93e-06, "loss": 2.4647, "step": 2429 }, { "epoch": 24.3, "grad_norm": 136.27796936035156, "learning_rate": 1.926666666666667e-06, "loss": 2.7158, "step": 2430 }, { "epoch": 24.31, "grad_norm": 41.07181930541992, "learning_rate": 1.9233333333333333e-06, "loss": 2.2025, "step": 2431 }, { "epoch": 24.32, "grad_norm": 33.5441780090332, "learning_rate": 1.9200000000000003e-06, "loss": 3.2209, "step": 2432 }, { "epoch": 24.33, "grad_norm": 92.59605407714844, "learning_rate": 1.916666666666667e-06, "loss": 3.2286, "step": 2433 }, { "epoch": 24.34, "grad_norm": 33.877784729003906, "learning_rate": 1.9133333333333334e-06, "loss": 2.1058, "step": 2434 }, { "epoch": 24.35, "grad_norm": 30.576221466064453, "learning_rate": 1.9100000000000003e-06, "loss": 2.383, "step": 2435 }, { "epoch": 24.36, "grad_norm": 29.921764373779297, "learning_rate": 1.906666666666667e-06, "loss": 3.1782, "step": 2436 }, { "epoch": 24.37, "grad_norm": 80.07665252685547, "learning_rate": 1.9033333333333335e-06, "loss": 3.0927, "step": 2437 }, { "epoch": 24.38, "grad_norm": 43.43018341064453, "learning_rate": 1.9000000000000002e-06, "loss": 2.532, "step": 2438 }, { "epoch": 24.39, "grad_norm": 102.9793701171875, "learning_rate": 1.896666666666667e-06, "loss": 2.7914, "step": 2439 }, { "epoch": 24.4, "grad_norm": 28.609786987304688, "learning_rate": 1.8933333333333333e-06, "loss": 2.5497, "step": 2440 }, { "epoch": 24.41, "grad_norm": 109.27538299560547, "learning_rate": 1.8900000000000001e-06, "loss": 3.2999, "step": 2441 }, { "epoch": 24.42, "grad_norm": 43.88134002685547, "learning_rate": 1.8866666666666669e-06, "loss": 2.6443, "step": 2442 }, { "epoch": 24.43, 
"grad_norm": 30.45478057861328, "learning_rate": 1.8833333333333334e-06, "loss": 2.5298, "step": 2443 }, { "epoch": 24.44, "grad_norm": 112.32008361816406, "learning_rate": 1.8800000000000002e-06, "loss": 2.7748, "step": 2444 }, { "epoch": 24.45, "grad_norm": 33.2665901184082, "learning_rate": 1.876666666666667e-06, "loss": 2.6247, "step": 2445 }, { "epoch": 24.46, "grad_norm": 59.35675048828125, "learning_rate": 1.8733333333333333e-06, "loss": 2.2828, "step": 2446 }, { "epoch": 24.47, "grad_norm": 37.586524963378906, "learning_rate": 1.87e-06, "loss": 2.4762, "step": 2447 }, { "epoch": 24.48, "grad_norm": 59.5407829284668, "learning_rate": 1.8666666666666669e-06, "loss": 3.4864, "step": 2448 }, { "epoch": 24.49, "grad_norm": 40.55196762084961, "learning_rate": 1.8633333333333334e-06, "loss": 3.2004, "step": 2449 }, { "epoch": 24.5, "grad_norm": 36.72858428955078, "learning_rate": 1.8600000000000002e-06, "loss": 6.0358, "step": 2450 }, { "epoch": 24.5, "eval_loss": 2.7072594165802, "eval_map": 0.0053, "eval_map_50": 0.0123, "eval_map_75": 0.0039, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0508, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0029, "eval_map_medium": 0.0079, "eval_map_neckline": 0.0119, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1212, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0327, "eval_map_small": 0.0041, "eval_map_sock": 
0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0125, "eval_mar_10": 0.0293, "eval_mar_100": 0.034, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.2755, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1667, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5037, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4478, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.042, "eval_mar_medium": 0.0462, "eval_mar_small": 0.0228, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.5118, "eval_samples_per_second": 5.402, "eval_steps_per_second": 1.35, "step": 2450 }, { "epoch": 24.51, "grad_norm": 44.90060043334961, "learning_rate": 1.856666666666667e-06, "loss": 2.852, "step": 2451 }, { "epoch": 24.52, "grad_norm": 38.53439712524414, "learning_rate": 1.8533333333333333e-06, "loss": 2.4032, "step": 2452 }, { "epoch": 24.53, "grad_norm": 64.83155059814453, 
"learning_rate": 1.85e-06, "loss": 3.5127, "step": 2453 }, { "epoch": 24.54, "grad_norm": 57.43510437011719, "learning_rate": 1.8466666666666668e-06, "loss": 3.9495, "step": 2454 }, { "epoch": 24.55, "grad_norm": 51.26101303100586, "learning_rate": 1.8433333333333334e-06, "loss": 2.5237, "step": 2455 }, { "epoch": 24.56, "grad_norm": 42.930030822753906, "learning_rate": 1.8400000000000002e-06, "loss": 2.8937, "step": 2456 }, { "epoch": 24.57, "grad_norm": 56.71316909790039, "learning_rate": 1.836666666666667e-06, "loss": 2.7275, "step": 2457 }, { "epoch": 24.58, "grad_norm": 66.40061950683594, "learning_rate": 1.8333333333333333e-06, "loss": 2.6314, "step": 2458 }, { "epoch": 24.59, "grad_norm": 43.93732452392578, "learning_rate": 1.83e-06, "loss": 3.2605, "step": 2459 }, { "epoch": 24.6, "grad_norm": 75.63673400878906, "learning_rate": 1.8266666666666668e-06, "loss": 3.9377, "step": 2460 }, { "epoch": 24.61, "grad_norm": 83.95149230957031, "learning_rate": 1.8233333333333334e-06, "loss": 1.8254, "step": 2461 }, { "epoch": 24.62, "grad_norm": 62.20707702636719, "learning_rate": 1.8200000000000002e-06, "loss": 3.2006, "step": 2462 }, { "epoch": 24.63, "grad_norm": 44.409671783447266, "learning_rate": 1.816666666666667e-06, "loss": 2.8989, "step": 2463 }, { "epoch": 24.64, "grad_norm": 77.15461730957031, "learning_rate": 1.8133333333333337e-06, "loss": 3.2204, "step": 2464 }, { "epoch": 24.65, "grad_norm": 254.88726806640625, "learning_rate": 1.81e-06, "loss": 2.6955, "step": 2465 }, { "epoch": 24.66, "grad_norm": 31.735679626464844, "learning_rate": 1.8066666666666668e-06, "loss": 3.4283, "step": 2466 }, { "epoch": 24.67, "grad_norm": 57.8155632019043, "learning_rate": 1.8033333333333336e-06, "loss": 2.882, "step": 2467 }, { "epoch": 24.68, "grad_norm": 275.1094665527344, "learning_rate": 1.8000000000000001e-06, "loss": 3.5849, "step": 2468 }, { "epoch": 24.69, "grad_norm": 76.17835235595703, "learning_rate": 1.796666666666667e-06, "loss": 2.9381, "step": 2469 }, { 
"epoch": 24.7, "grad_norm": 60.586612701416016, "learning_rate": 1.7933333333333337e-06, "loss": 3.1421, "step": 2470 }, { "epoch": 24.71, "grad_norm": 48.780303955078125, "learning_rate": 1.79e-06, "loss": 2.3028, "step": 2471 }, { "epoch": 24.72, "grad_norm": 57.203590393066406, "learning_rate": 1.7866666666666668e-06, "loss": 2.2768, "step": 2472 }, { "epoch": 24.73, "grad_norm": 64.65271759033203, "learning_rate": 1.7833333333333336e-06, "loss": 2.846, "step": 2473 }, { "epoch": 24.74, "grad_norm": 71.29143524169922, "learning_rate": 1.7800000000000001e-06, "loss": 2.2282, "step": 2474 }, { "epoch": 24.75, "grad_norm": 47.829254150390625, "learning_rate": 1.7766666666666669e-06, "loss": 2.7212, "step": 2475 }, { "epoch": 24.76, "grad_norm": 92.00366973876953, "learning_rate": 1.7733333333333336e-06, "loss": 2.8597, "step": 2476 }, { "epoch": 24.77, "grad_norm": 33.09220504760742, "learning_rate": 1.77e-06, "loss": 2.9169, "step": 2477 }, { "epoch": 24.78, "grad_norm": 93.87297821044922, "learning_rate": 1.7666666666666668e-06, "loss": 2.7136, "step": 2478 }, { "epoch": 24.79, "grad_norm": 83.7288818359375, "learning_rate": 1.7633333333333335e-06, "loss": 2.2235, "step": 2479 }, { "epoch": 24.8, "grad_norm": 307.2098388671875, "learning_rate": 1.76e-06, "loss": 2.9684, "step": 2480 }, { "epoch": 24.81, "grad_norm": 62.588722229003906, "learning_rate": 1.7566666666666669e-06, "loss": 3.3946, "step": 2481 }, { "epoch": 24.82, "grad_norm": 54.14705276489258, "learning_rate": 1.7533333333333336e-06, "loss": 2.524, "step": 2482 }, { "epoch": 24.83, "grad_norm": 44.91950225830078, "learning_rate": 1.75e-06, "loss": 2.191, "step": 2483 }, { "epoch": 24.84, "grad_norm": 31.450525283813477, "learning_rate": 1.7466666666666667e-06, "loss": 3.319, "step": 2484 }, { "epoch": 24.85, "grad_norm": 53.17043685913086, "learning_rate": 1.7433333333333335e-06, "loss": 2.2089, "step": 2485 }, { "epoch": 24.86, "grad_norm": 56.551212310791016, "learning_rate": 1.74e-06, "loss": 
2.9485, "step": 2486 }, { "epoch": 24.87, "grad_norm": 43.03234100341797, "learning_rate": 1.7366666666666668e-06, "loss": 2.4943, "step": 2487 }, { "epoch": 24.88, "grad_norm": 92.80931854248047, "learning_rate": 1.7333333333333336e-06, "loss": 2.5262, "step": 2488 }, { "epoch": 24.89, "grad_norm": 28.551332473754883, "learning_rate": 1.73e-06, "loss": 2.7685, "step": 2489 }, { "epoch": 24.9, "grad_norm": 44.7183952331543, "learning_rate": 1.7266666666666667e-06, "loss": 2.7669, "step": 2490 }, { "epoch": 24.91, "grad_norm": 81.70789337158203, "learning_rate": 1.7233333333333335e-06, "loss": 2.8875, "step": 2491 }, { "epoch": 24.92, "grad_norm": 47.768436431884766, "learning_rate": 1.72e-06, "loss": 2.7968, "step": 2492 }, { "epoch": 24.93, "grad_norm": 60.23580551147461, "learning_rate": 1.7166666666666668e-06, "loss": 2.2751, "step": 2493 }, { "epoch": 24.94, "grad_norm": 61.055641174316406, "learning_rate": 1.7133333333333336e-06, "loss": 2.4396, "step": 2494 }, { "epoch": 24.95, "grad_norm": 32.91073989868164, "learning_rate": 1.7100000000000004e-06, "loss": 3.0044, "step": 2495 }, { "epoch": 24.96, "grad_norm": 51.313236236572266, "learning_rate": 1.7066666666666667e-06, "loss": 2.0588, "step": 2496 }, { "epoch": 24.97, "grad_norm": 36.47032928466797, "learning_rate": 1.7033333333333335e-06, "loss": 2.5805, "step": 2497 }, { "epoch": 24.98, "grad_norm": 61.78495788574219, "learning_rate": 1.7000000000000002e-06, "loss": 3.7084, "step": 2498 }, { "epoch": 24.99, "grad_norm": 42.705535888671875, "learning_rate": 1.6966666666666668e-06, "loss": 2.977, "step": 2499 }, { "epoch": 25.0, "grad_norm": 98.77273559570312, "learning_rate": 1.6933333333333336e-06, "loss": 2.7728, "step": 2500 }, { "epoch": 25.0, "eval_loss": 2.7010297775268555, "eval_map": 0.0053, "eval_map_50": 0.0122, "eval_map_75": 0.0039, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 
0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0506, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0042, "eval_map_medium": 0.0071, "eval_map_neckline": 0.0111, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1207, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.034, "eval_map_small": 0.0047, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0122, "eval_mar_10": 0.0294, "eval_mar_100": 0.0339, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.2551, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1667, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5134, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 
0.4565, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0418, "eval_mar_medium": 0.048, "eval_mar_small": 0.0221, "eval_model_preparation_time": 0.0124, "eval_runtime": 19.0215, "eval_samples_per_second": 5.257, "eval_steps_per_second": 1.314, "step": 2500 }, { "epoch": 25.01, "grad_norm": 57.90734100341797, "learning_rate": 1.6900000000000003e-06, "loss": 3.2443, "step": 2501 }, { "epoch": 25.02, "grad_norm": 94.08735656738281, "learning_rate": 1.6866666666666667e-06, "loss": 3.4322, "step": 2502 }, { "epoch": 25.03, "grad_norm": 43.3316535949707, "learning_rate": 1.6833333333333335e-06, "loss": 3.1548, "step": 2503 }, { "epoch": 25.04, "grad_norm": 46.08415603637695, "learning_rate": 1.6800000000000002e-06, "loss": 2.4652, "step": 2504 }, { "epoch": 25.05, "grad_norm": 37.556365966796875, "learning_rate": 1.6766666666666668e-06, "loss": 2.4533, "step": 2505 }, { "epoch": 25.06, "grad_norm": 85.09952545166016, "learning_rate": 1.6733333333333335e-06, "loss": 2.8451, "step": 2506 }, { "epoch": 25.07, "grad_norm": 72.71622467041016, "learning_rate": 1.6700000000000003e-06, "loss": 2.7287, "step": 2507 }, { "epoch": 25.08, "grad_norm": 54.08368682861328, "learning_rate": 1.6666666666666667e-06, "loss": 2.8059, "step": 2508 }, { "epoch": 25.09, "grad_norm": 51.249107360839844, "learning_rate": 1.6633333333333334e-06, "loss": 2.5969, "step": 2509 }, { "epoch": 25.1, "grad_norm": 106.93138122558594, "learning_rate": 1.6600000000000002e-06, "loss": 2.1695, "step": 2510 }, { "epoch": 25.11, "grad_norm": 90.49313354492188, "learning_rate": 1.6566666666666668e-06, "loss": 2.8486, "step": 2511 }, { "epoch": 25.12, "grad_norm": 51.51242446899414, "learning_rate": 1.6533333333333335e-06, "loss": 2.5199, "step": 2512 }, { "epoch": 25.13, "grad_norm": 
26.88058090209961, "learning_rate": 1.6500000000000003e-06, "loss": 2.5302, "step": 2513 }, { "epoch": 25.14, "grad_norm": 70.28350830078125, "learning_rate": 1.6466666666666666e-06, "loss": 2.6256, "step": 2514 }, { "epoch": 25.15, "grad_norm": 73.88119506835938, "learning_rate": 1.6433333333333334e-06, "loss": 2.6178, "step": 2515 }, { "epoch": 25.16, "grad_norm": 140.1497802734375, "learning_rate": 1.6400000000000002e-06, "loss": 2.565, "step": 2516 }, { "epoch": 25.17, "grad_norm": 68.18173217773438, "learning_rate": 1.6366666666666667e-06, "loss": 2.2729, "step": 2517 }, { "epoch": 25.18, "grad_norm": 76.015625, "learning_rate": 1.6333333333333335e-06, "loss": 3.3316, "step": 2518 }, { "epoch": 25.19, "grad_norm": 58.34204864501953, "learning_rate": 1.6300000000000003e-06, "loss": 2.7416, "step": 2519 }, { "epoch": 25.2, "grad_norm": 81.64630126953125, "learning_rate": 1.6266666666666666e-06, "loss": 2.2302, "step": 2520 }, { "epoch": 25.21, "grad_norm": 75.45901489257812, "learning_rate": 1.6233333333333334e-06, "loss": 2.7222, "step": 2521 }, { "epoch": 25.22, "grad_norm": 67.32763671875, "learning_rate": 1.6200000000000002e-06, "loss": 2.7893, "step": 2522 }, { "epoch": 25.23, "grad_norm": 31.902233123779297, "learning_rate": 1.6166666666666667e-06, "loss": 3.0228, "step": 2523 }, { "epoch": 25.24, "grad_norm": 69.539306640625, "learning_rate": 1.6133333333333335e-06, "loss": 2.615, "step": 2524 }, { "epoch": 25.25, "grad_norm": 62.16154479980469, "learning_rate": 1.6100000000000003e-06, "loss": 2.8788, "step": 2525 }, { "epoch": 25.26, "grad_norm": 400.85247802734375, "learning_rate": 1.606666666666667e-06, "loss": 2.824, "step": 2526 }, { "epoch": 25.27, "grad_norm": 119.32072448730469, "learning_rate": 1.6033333333333334e-06, "loss": 2.9658, "step": 2527 }, { "epoch": 25.28, "grad_norm": 48.13498306274414, "learning_rate": 1.6000000000000001e-06, "loss": 2.4056, "step": 2528 }, { "epoch": 25.29, "grad_norm": 35.360328674316406, "learning_rate": 
1.596666666666667e-06, "loss": 3.2613, "step": 2529 }, { "epoch": 25.3, "grad_norm": 111.4293441772461, "learning_rate": 1.5933333333333335e-06, "loss": 2.0467, "step": 2530 }, { "epoch": 25.31, "grad_norm": 34.146400451660156, "learning_rate": 1.5900000000000002e-06, "loss": 2.2598, "step": 2531 }, { "epoch": 25.32, "grad_norm": 40.30018615722656, "learning_rate": 1.586666666666667e-06, "loss": 2.3189, "step": 2532 }, { "epoch": 25.33, "grad_norm": 52.436771392822266, "learning_rate": 1.5833333333333333e-06, "loss": 2.7143, "step": 2533 }, { "epoch": 25.34, "grad_norm": 60.13812255859375, "learning_rate": 1.5800000000000001e-06, "loss": 2.5054, "step": 2534 }, { "epoch": 25.35, "grad_norm": 37.40257263183594, "learning_rate": 1.5766666666666669e-06, "loss": 2.6629, "step": 2535 }, { "epoch": 25.36, "grad_norm": 53.33711242675781, "learning_rate": 1.5733333333333334e-06, "loss": 3.1571, "step": 2536 }, { "epoch": 25.37, "grad_norm": 55.953887939453125, "learning_rate": 1.5700000000000002e-06, "loss": 2.8172, "step": 2537 }, { "epoch": 25.38, "grad_norm": 75.31903076171875, "learning_rate": 1.566666666666667e-06, "loss": 2.7323, "step": 2538 }, { "epoch": 25.39, "grad_norm": 52.27378845214844, "learning_rate": 1.5633333333333333e-06, "loss": 2.3487, "step": 2539 }, { "epoch": 25.4, "grad_norm": 48.44373321533203, "learning_rate": 1.56e-06, "loss": 2.832, "step": 2540 }, { "epoch": 25.41, "grad_norm": 46.39986801147461, "learning_rate": 1.5566666666666669e-06, "loss": 2.8752, "step": 2541 }, { "epoch": 25.42, "grad_norm": 45.52206802368164, "learning_rate": 1.5533333333333334e-06, "loss": 2.9579, "step": 2542 }, { "epoch": 25.43, "grad_norm": 61.947086334228516, "learning_rate": 1.5500000000000002e-06, "loss": 2.5069, "step": 2543 }, { "epoch": 25.44, "grad_norm": 54.9757080078125, "learning_rate": 1.546666666666667e-06, "loss": 2.3394, "step": 2544 }, { "epoch": 25.45, "grad_norm": 51.94173049926758, "learning_rate": 1.5433333333333333e-06, "loss": 2.6105, "step": 
2545 }, { "epoch": 25.46, "grad_norm": 34.68891143798828, "learning_rate": 1.54e-06, "loss": 3.3094, "step": 2546 }, { "epoch": 25.47, "grad_norm": 32.038429260253906, "learning_rate": 1.5366666666666668e-06, "loss": 3.4993, "step": 2547 }, { "epoch": 25.48, "grad_norm": 83.79023742675781, "learning_rate": 1.5333333333333334e-06, "loss": 3.2064, "step": 2548 }, { "epoch": 25.49, "grad_norm": 64.23443603515625, "learning_rate": 1.5300000000000002e-06, "loss": 5.881, "step": 2549 }, { "epoch": 25.5, "grad_norm": 59.78073501586914, "learning_rate": 1.526666666666667e-06, "loss": 2.5169, "step": 2550 }, { "epoch": 25.5, "eval_loss": 2.6967902183532715, "eval_map": 0.0054, "eval_map_50": 0.012, "eval_map_75": 0.0045, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.058, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0032, "eval_map_medium": 0.0081, "eval_map_neckline": 0.0112, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1204, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0333, "eval_map_small": 0.004, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0116, "eval_mar_10": 0.0289, "eval_mar_100": 0.0339, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, 
"eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.2469, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1603, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5328, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4478, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0402, "eval_mar_medium": 0.0473, "eval_mar_small": 0.0235, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.7548, "eval_samples_per_second": 5.332, "eval_steps_per_second": 1.333, "step": 2550 }, { "epoch": 25.51, "grad_norm": 68.40452575683594, "learning_rate": 1.5233333333333333e-06, "loss": 2.1323, "step": 2551 }, { "epoch": 25.52, "grad_norm": 119.65465545654297, "learning_rate": 1.52e-06, "loss": 2.7469, "step": 2552 }, { "epoch": 25.53, "grad_norm": 51.49939727783203, "learning_rate": 1.5166666666666668e-06, "loss": 2.7959, "step": 2553 }, { "epoch": 25.54, "grad_norm": 70.23233795166016, "learning_rate": 1.5133333333333334e-06, "loss": 2.6745, "step": 2554 }, { "epoch": 25.55, "grad_norm": 92.0234146118164, "learning_rate": 1.5100000000000002e-06, "loss": 2.7543, "step": 2555 }, { "epoch": 25.56, 
"grad_norm": 294.5852355957031, "learning_rate": 1.506666666666667e-06, "loss": 2.6362, "step": 2556 }, { "epoch": 25.57, "grad_norm": 250.05523681640625, "learning_rate": 1.5033333333333337e-06, "loss": 3.4933, "step": 2557 }, { "epoch": 25.58, "grad_norm": 59.337181091308594, "learning_rate": 1.5e-06, "loss": 3.5804, "step": 2558 }, { "epoch": 25.59, "grad_norm": 70.1678695678711, "learning_rate": 1.4966666666666668e-06, "loss": 2.6608, "step": 2559 }, { "epoch": 25.6, "grad_norm": 66.59053039550781, "learning_rate": 1.4933333333333336e-06, "loss": 2.8121, "step": 2560 }, { "epoch": 25.61, "grad_norm": 65.44763946533203, "learning_rate": 1.4900000000000001e-06, "loss": 2.0744, "step": 2561 }, { "epoch": 25.62, "grad_norm": 117.65066528320312, "learning_rate": 1.486666666666667e-06, "loss": 3.1477, "step": 2562 }, { "epoch": 25.63, "grad_norm": 33.40025329589844, "learning_rate": 1.4833333333333337e-06, "loss": 3.6913, "step": 2563 }, { "epoch": 25.64, "grad_norm": 70.91312408447266, "learning_rate": 1.48e-06, "loss": 2.6588, "step": 2564 }, { "epoch": 25.65, "grad_norm": 64.67438507080078, "learning_rate": 1.4766666666666668e-06, "loss": 2.9816, "step": 2565 }, { "epoch": 25.66, "grad_norm": 75.95426177978516, "learning_rate": 1.4733333333333336e-06, "loss": 3.4937, "step": 2566 }, { "epoch": 25.67, "grad_norm": 44.19197463989258, "learning_rate": 1.4700000000000001e-06, "loss": 2.9723, "step": 2567 }, { "epoch": 25.68, "grad_norm": 40.21281433105469, "learning_rate": 1.4666666666666669e-06, "loss": 3.5897, "step": 2568 }, { "epoch": 25.69, "grad_norm": 38.92327117919922, "learning_rate": 1.4633333333333337e-06, "loss": 3.1008, "step": 2569 }, { "epoch": 25.7, "grad_norm": 62.24479293823242, "learning_rate": 1.46e-06, "loss": 3.2347, "step": 2570 }, { "epoch": 25.71, "grad_norm": 47.0329475402832, "learning_rate": 1.4566666666666668e-06, "loss": 3.0334, "step": 2571 }, { "epoch": 25.72, "grad_norm": 47.0855598449707, "learning_rate": 1.4533333333333335e-06, 
"loss": 2.7709, "step": 2572 }, { "epoch": 25.73, "grad_norm": 38.313087463378906, "learning_rate": 1.45e-06, "loss": 2.4352, "step": 2573 }, { "epoch": 25.74, "grad_norm": 53.646240234375, "learning_rate": 1.4466666666666669e-06, "loss": 2.9012, "step": 2574 }, { "epoch": 25.75, "grad_norm": 38.797176361083984, "learning_rate": 1.4433333333333336e-06, "loss": 3.3258, "step": 2575 }, { "epoch": 25.76, "grad_norm": 49.251461029052734, "learning_rate": 1.44e-06, "loss": 2.4193, "step": 2576 }, { "epoch": 25.77, "grad_norm": 132.9632568359375, "learning_rate": 1.4366666666666667e-06, "loss": 2.757, "step": 2577 }, { "epoch": 25.78, "grad_norm": 47.311119079589844, "learning_rate": 1.4333333333333335e-06, "loss": 2.8367, "step": 2578 }, { "epoch": 25.79, "grad_norm": 78.15989685058594, "learning_rate": 1.43e-06, "loss": 2.4995, "step": 2579 }, { "epoch": 25.8, "grad_norm": 69.68894958496094, "learning_rate": 1.4266666666666668e-06, "loss": 2.9348, "step": 2580 }, { "epoch": 25.81, "grad_norm": 108.61835479736328, "learning_rate": 1.4233333333333336e-06, "loss": 3.1044, "step": 2581 }, { "epoch": 25.82, "grad_norm": 52.58383560180664, "learning_rate": 1.42e-06, "loss": 2.5452, "step": 2582 }, { "epoch": 25.83, "grad_norm": 73.2197036743164, "learning_rate": 1.4166666666666667e-06, "loss": 2.0805, "step": 2583 }, { "epoch": 25.84, "grad_norm": 51.951507568359375, "learning_rate": 1.4133333333333335e-06, "loss": 2.7979, "step": 2584 }, { "epoch": 25.85, "grad_norm": 52.202064514160156, "learning_rate": 1.41e-06, "loss": 3.1239, "step": 2585 }, { "epoch": 25.86, "grad_norm": 139.7153778076172, "learning_rate": 1.4066666666666668e-06, "loss": 2.0014, "step": 2586 }, { "epoch": 25.87, "grad_norm": 68.16798400878906, "learning_rate": 1.4033333333333336e-06, "loss": 2.8757, "step": 2587 }, { "epoch": 25.88, "grad_norm": 56.30638122558594, "learning_rate": 1.4000000000000001e-06, "loss": 3.2837, "step": 2588 }, { "epoch": 25.89, "grad_norm": 69.10009765625, "learning_rate": 
1.3966666666666667e-06, "loss": 5.7032, "step": 2589 }, { "epoch": 25.9, "grad_norm": 178.2053680419922, "learning_rate": 1.3933333333333335e-06, "loss": 2.7969, "step": 2590 }, { "epoch": 25.91, "grad_norm": 59.6418342590332, "learning_rate": 1.3900000000000002e-06, "loss": 2.1562, "step": 2591 }, { "epoch": 25.92, "grad_norm": 56.9749641418457, "learning_rate": 1.3866666666666668e-06, "loss": 1.6997, "step": 2592 }, { "epoch": 25.93, "grad_norm": 53.229312896728516, "learning_rate": 1.3833333333333336e-06, "loss": 3.0003, "step": 2593 }, { "epoch": 25.94, "grad_norm": 86.33858489990234, "learning_rate": 1.3800000000000001e-06, "loss": 2.7432, "step": 2594 }, { "epoch": 25.95, "grad_norm": 391.6607666015625, "learning_rate": 1.3766666666666667e-06, "loss": 2.0531, "step": 2595 }, { "epoch": 25.96, "grad_norm": 57.3655891418457, "learning_rate": 1.3733333333333335e-06, "loss": 2.8125, "step": 2596 }, { "epoch": 25.97, "grad_norm": 49.095550537109375, "learning_rate": 1.3700000000000002e-06, "loss": 3.3204, "step": 2597 }, { "epoch": 25.98, "grad_norm": 63.45648956298828, "learning_rate": 1.3666666666666668e-06, "loss": 2.6319, "step": 2598 }, { "epoch": 25.99, "grad_norm": 56.48057174682617, "learning_rate": 1.3633333333333336e-06, "loss": 2.6391, "step": 2599 }, { "epoch": 26.0, "grad_norm": 90.32573699951172, "learning_rate": 1.3600000000000001e-06, "loss": 2.9971, "step": 2600 }, { "epoch": 26.0, "eval_loss": 2.680102586746216, "eval_map": 0.0057, "eval_map_50": 0.0124, "eval_map_75": 0.0046, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0622, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, 
"eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0045, "eval_map_medium": 0.0084, "eval_map_neckline": 0.0115, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1221, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0369, "eval_map_small": 0.004, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0123, "eval_mar_10": 0.0303, "eval_mar_100": 0.0354, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.2816, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1762, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5381, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4548, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0439, "eval_mar_medium": 
0.0486, "eval_mar_small": 0.0234, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.3765, "eval_samples_per_second": 5.442, "eval_steps_per_second": 1.36, "step": 2600 }, { "epoch": 26.01, "grad_norm": 64.95953369140625, "learning_rate": 1.3566666666666667e-06, "loss": 2.6461, "step": 2601 }, { "epoch": 26.02, "grad_norm": 37.55771255493164, "learning_rate": 1.3533333333333334e-06, "loss": 2.0782, "step": 2602 }, { "epoch": 26.03, "grad_norm": 44.49614715576172, "learning_rate": 1.3500000000000002e-06, "loss": 2.126, "step": 2603 }, { "epoch": 26.04, "grad_norm": 137.8076171875, "learning_rate": 1.3466666666666668e-06, "loss": 3.1039, "step": 2604 }, { "epoch": 26.05, "grad_norm": 32.74199295043945, "learning_rate": 1.3433333333333335e-06, "loss": 2.6513, "step": 2605 }, { "epoch": 26.06, "grad_norm": 53.303558349609375, "learning_rate": 1.34e-06, "loss": 2.4789, "step": 2606 }, { "epoch": 26.07, "grad_norm": 59.16621017456055, "learning_rate": 1.3366666666666666e-06, "loss": 3.1782, "step": 2607 }, { "epoch": 26.08, "grad_norm": 89.15265655517578, "learning_rate": 1.3333333333333334e-06, "loss": 2.7129, "step": 2608 }, { "epoch": 26.09, "grad_norm": 26.89321517944336, "learning_rate": 1.3300000000000002e-06, "loss": 3.3196, "step": 2609 }, { "epoch": 26.1, "grad_norm": 44.720550537109375, "learning_rate": 1.3266666666666667e-06, "loss": 1.9796, "step": 2610 }, { "epoch": 26.11, "grad_norm": 64.3344955444336, "learning_rate": 1.3233333333333335e-06, "loss": 2.67, "step": 2611 }, { "epoch": 26.12, "grad_norm": 63.269405364990234, "learning_rate": 1.32e-06, "loss": 2.8197, "step": 2612 }, { "epoch": 26.13, "grad_norm": 69.33949279785156, "learning_rate": 1.3166666666666666e-06, "loss": 2.8602, "step": 2613 }, { "epoch": 26.14, "grad_norm": 86.10668182373047, "learning_rate": 1.3133333333333334e-06, "loss": 2.5115, "step": 2614 }, { "epoch": 26.15, "grad_norm": 35.44573974609375, "learning_rate": 1.3100000000000002e-06, "loss": 2.8028, "step": 2615 }, { 
"epoch": 26.16, "grad_norm": 107.04894256591797, "learning_rate": 1.3066666666666667e-06, "loss": 2.4017, "step": 2616 }, { "epoch": 26.17, "grad_norm": 81.20057678222656, "learning_rate": 1.3033333333333335e-06, "loss": 3.1086, "step": 2617 }, { "epoch": 26.18, "grad_norm": 49.503334045410156, "learning_rate": 1.3e-06, "loss": 2.9458, "step": 2618 }, { "epoch": 26.19, "grad_norm": 49.6707878112793, "learning_rate": 1.2966666666666668e-06, "loss": 2.6859, "step": 2619 }, { "epoch": 26.2, "grad_norm": 57.356327056884766, "learning_rate": 1.2933333333333334e-06, "loss": 2.7961, "step": 2620 }, { "epoch": 26.21, "grad_norm": 43.997802734375, "learning_rate": 1.2900000000000001e-06, "loss": 2.6748, "step": 2621 }, { "epoch": 26.22, "grad_norm": 74.72628784179688, "learning_rate": 1.286666666666667e-06, "loss": 2.8047, "step": 2622 }, { "epoch": 26.23, "grad_norm": 75.2428207397461, "learning_rate": 1.2833333333333335e-06, "loss": 2.1579, "step": 2623 }, { "epoch": 26.24, "grad_norm": 223.0723114013672, "learning_rate": 1.28e-06, "loss": 2.5131, "step": 2624 }, { "epoch": 26.25, "grad_norm": 772.798828125, "learning_rate": 1.2766666666666668e-06, "loss": 2.2353, "step": 2625 }, { "epoch": 26.26, "grad_norm": 42.62021255493164, "learning_rate": 1.2733333333333334e-06, "loss": 2.8693, "step": 2626 }, { "epoch": 26.27, "grad_norm": 102.04212951660156, "learning_rate": 1.2700000000000001e-06, "loss": 2.5033, "step": 2627 }, { "epoch": 26.28, "grad_norm": 48.30384063720703, "learning_rate": 1.2666666666666669e-06, "loss": 2.8773, "step": 2628 }, { "epoch": 26.29, "grad_norm": 34.20259475708008, "learning_rate": 1.2633333333333334e-06, "loss": 2.6346, "step": 2629 }, { "epoch": 26.3, "grad_norm": 82.35931396484375, "learning_rate": 1.26e-06, "loss": 3.1395, "step": 2630 }, { "epoch": 26.31, "grad_norm": 40.37326431274414, "learning_rate": 1.2566666666666668e-06, "loss": 3.5337, "step": 2631 }, { "epoch": 26.32, "grad_norm": 58.822296142578125, "learning_rate": 
1.2533333333333333e-06, "loss": 3.0046, "step": 2632 }, { "epoch": 26.33, "grad_norm": 39.671630859375, "learning_rate": 1.25e-06, "loss": 4.0272, "step": 2633 }, { "epoch": 26.34, "grad_norm": 88.46854400634766, "learning_rate": 1.2466666666666667e-06, "loss": 3.5321, "step": 2634 }, { "epoch": 26.35, "grad_norm": 31.132434844970703, "learning_rate": 1.2433333333333334e-06, "loss": 3.117, "step": 2635 }, { "epoch": 26.36, "grad_norm": 42.90713882446289, "learning_rate": 1.2400000000000002e-06, "loss": 2.5495, "step": 2636 }, { "epoch": 26.37, "grad_norm": 197.24681091308594, "learning_rate": 1.2366666666666668e-06, "loss": 2.5313, "step": 2637 }, { "epoch": 26.38, "grad_norm": 39.93642044067383, "learning_rate": 1.2333333333333335e-06, "loss": 3.8574, "step": 2638 }, { "epoch": 26.39, "grad_norm": 41.86310958862305, "learning_rate": 1.23e-06, "loss": 2.7932, "step": 2639 }, { "epoch": 26.4, "grad_norm": 43.591552734375, "learning_rate": 1.2266666666666666e-06, "loss": 2.6848, "step": 2640 }, { "epoch": 26.41, "grad_norm": 35.9400749206543, "learning_rate": 1.2233333333333334e-06, "loss": 2.3531, "step": 2641 }, { "epoch": 26.42, "grad_norm": 57.104923248291016, "learning_rate": 1.2200000000000002e-06, "loss": 1.78, "step": 2642 }, { "epoch": 26.43, "grad_norm": 77.53011322021484, "learning_rate": 1.2166666666666667e-06, "loss": 2.7931, "step": 2643 }, { "epoch": 26.44, "grad_norm": 33.8992805480957, "learning_rate": 1.2133333333333335e-06, "loss": 2.8365, "step": 2644 }, { "epoch": 26.45, "grad_norm": 116.94156646728516, "learning_rate": 1.21e-06, "loss": 2.7242, "step": 2645 }, { "epoch": 26.46, "grad_norm": 52.92080307006836, "learning_rate": 1.2066666666666668e-06, "loss": 2.503, "step": 2646 }, { "epoch": 26.47, "grad_norm": 83.33980560302734, "learning_rate": 1.2033333333333334e-06, "loss": 2.7409, "step": 2647 }, { "epoch": 26.48, "grad_norm": 89.76388549804688, "learning_rate": 1.2000000000000002e-06, "loss": 2.9051, "step": 2648 }, { "epoch": 26.49, 
"grad_norm": 55.79908752441406, "learning_rate": 1.1966666666666667e-06, "loss": 3.2702, "step": 2649 }, { "epoch": 26.5, "grad_norm": 111.6187973022461, "learning_rate": 1.1933333333333335e-06, "loss": 2.3748, "step": 2650 }, { "epoch": 26.5, "eval_loss": 2.6808907985687256, "eval_map": 0.0056, "eval_map_50": 0.0123, "eval_map_75": 0.0046, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0597, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0038, "eval_map_medium": 0.0083, "eval_map_neckline": 0.0106, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1236, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0362, "eval_map_small": 0.004, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0121, "eval_mar_10": 0.0305, "eval_mar_100": 0.0354, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.2816, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head 
covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1762, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5381, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4539, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0427, "eval_mar_medium": 0.0487, "eval_mar_small": 0.0233, "eval_model_preparation_time": 0.0124, "eval_runtime": 19.5412, "eval_samples_per_second": 5.117, "eval_steps_per_second": 1.279, "step": 2650 }, { "epoch": 26.51, "grad_norm": 57.8390007019043, "learning_rate": 1.19e-06, "loss": 2.7306, "step": 2651 }, { "epoch": 26.52, "grad_norm": 49.21223449707031, "learning_rate": 1.1866666666666668e-06, "loss": 2.707, "step": 2652 }, { "epoch": 26.53, "grad_norm": 97.20648193359375, "learning_rate": 1.1833333333333334e-06, "loss": 5.5979, "step": 2653 }, { "epoch": 26.54, "grad_norm": 304.931396484375, "learning_rate": 1.1800000000000001e-06, "loss": 2.8101, "step": 2654 }, { "epoch": 26.55, "grad_norm": 85.54536437988281, "learning_rate": 1.1766666666666667e-06, "loss": 2.3005, "step": 2655 }, { "epoch": 26.56, "grad_norm": 49.10234832763672, "learning_rate": 1.1733333333333335e-06, "loss": 3.0246, "step": 2656 }, { "epoch": 26.57, "grad_norm": 80.0844497680664, "learning_rate": 1.1700000000000002e-06, "loss": 2.9836, "step": 2657 }, { "epoch": 26.58, "grad_norm": 59.485626220703125, "learning_rate": 1.1666666666666668e-06, "loss": 3.3165, "step": 2658 }, { "epoch": 26.59, "grad_norm": 
239.44985961914062, "learning_rate": 1.1633333333333333e-06, "loss": 3.0143, "step": 2659 }, { "epoch": 26.6, "grad_norm": 70.08740234375, "learning_rate": 1.1600000000000001e-06, "loss": 2.3778, "step": 2660 }, { "epoch": 26.61, "grad_norm": 58.1363410949707, "learning_rate": 1.1566666666666667e-06, "loss": 2.3202, "step": 2661 }, { "epoch": 26.62, "grad_norm": 106.35008239746094, "learning_rate": 1.1533333333333334e-06, "loss": 3.0317, "step": 2662 }, { "epoch": 26.63, "grad_norm": 68.99684143066406, "learning_rate": 1.1500000000000002e-06, "loss": 3.1451, "step": 2663 }, { "epoch": 26.64, "grad_norm": 227.90684509277344, "learning_rate": 1.1466666666666668e-06, "loss": 2.3153, "step": 2664 }, { "epoch": 26.65, "grad_norm": 54.66377258300781, "learning_rate": 1.1433333333333333e-06, "loss": 2.765, "step": 2665 }, { "epoch": 26.66, "grad_norm": 55.87306213378906, "learning_rate": 1.14e-06, "loss": 2.3867, "step": 2666 }, { "epoch": 26.67, "grad_norm": 138.58163452148438, "learning_rate": 1.1366666666666667e-06, "loss": 3.0889, "step": 2667 }, { "epoch": 26.68, "grad_norm": 43.06147003173828, "learning_rate": 1.1333333333333334e-06, "loss": 3.0728, "step": 2668 }, { "epoch": 26.69, "grad_norm": 33.691871643066406, "learning_rate": 1.1300000000000002e-06, "loss": 2.3242, "step": 2669 }, { "epoch": 26.7, "grad_norm": 59.60405731201172, "learning_rate": 1.1266666666666667e-06, "loss": 2.7393, "step": 2670 }, { "epoch": 26.71, "grad_norm": 39.62070083618164, "learning_rate": 1.1233333333333333e-06, "loss": 2.7068, "step": 2671 }, { "epoch": 26.72, "grad_norm": 58.843994140625, "learning_rate": 1.12e-06, "loss": 3.4066, "step": 2672 }, { "epoch": 26.73, "grad_norm": 54.00954055786133, "learning_rate": 1.1166666666666666e-06, "loss": 2.8292, "step": 2673 }, { "epoch": 26.74, "grad_norm": 31.935808181762695, "learning_rate": 1.1133333333333334e-06, "loss": 2.3611, "step": 2674 }, { "epoch": 26.75, "grad_norm": 72.93402099609375, "learning_rate": 1.1100000000000002e-06, 
"loss": 3.0062, "step": 2675 }, { "epoch": 26.76, "grad_norm": 78.08292388916016, "learning_rate": 1.1066666666666667e-06, "loss": 2.7145, "step": 2676 }, { "epoch": 26.77, "grad_norm": 69.54940795898438, "learning_rate": 1.1033333333333335e-06, "loss": 2.4622, "step": 2677 }, { "epoch": 26.78, "grad_norm": 56.787113189697266, "learning_rate": 1.1e-06, "loss": 2.1602, "step": 2678 }, { "epoch": 26.79, "grad_norm": 630.2937622070312, "learning_rate": 1.0966666666666666e-06, "loss": 2.9191, "step": 2679 }, { "epoch": 26.8, "grad_norm": 61.83102798461914, "learning_rate": 1.0933333333333334e-06, "loss": 3.0854, "step": 2680 }, { "epoch": 26.81, "grad_norm": 37.33146667480469, "learning_rate": 1.0900000000000002e-06, "loss": 3.0522, "step": 2681 }, { "epoch": 26.82, "grad_norm": 81.9482192993164, "learning_rate": 1.0866666666666667e-06, "loss": 1.81, "step": 2682 }, { "epoch": 26.83, "grad_norm": 54.26622009277344, "learning_rate": 1.0833333333333335e-06, "loss": 2.8708, "step": 2683 }, { "epoch": 26.84, "grad_norm": 54.24099349975586, "learning_rate": 1.08e-06, "loss": 5.7748, "step": 2684 }, { "epoch": 26.85, "grad_norm": 54.06476974487305, "learning_rate": 1.0766666666666666e-06, "loss": 2.0967, "step": 2685 }, { "epoch": 26.86, "grad_norm": 56.87983703613281, "learning_rate": 1.0733333333333334e-06, "loss": 3.0483, "step": 2686 }, { "epoch": 26.87, "grad_norm": 58.66854476928711, "learning_rate": 1.0700000000000001e-06, "loss": 3.1629, "step": 2687 }, { "epoch": 26.88, "grad_norm": 489.780517578125, "learning_rate": 1.066666666666667e-06, "loss": 2.4329, "step": 2688 }, { "epoch": 26.89, "grad_norm": 60.88666915893555, "learning_rate": 1.0633333333333335e-06, "loss": 2.8141, "step": 2689 }, { "epoch": 26.9, "grad_norm": 51.12504196166992, "learning_rate": 1.06e-06, "loss": 3.0742, "step": 2690 }, { "epoch": 26.91, "grad_norm": 96.5848159790039, "learning_rate": 1.0566666666666668e-06, "loss": 2.8233, "step": 2691 }, { "epoch": 26.92, "grad_norm": 
31.416425704956055, "learning_rate": 1.0533333333333333e-06, "loss": 3.2619, "step": 2692 }, { "epoch": 26.93, "grad_norm": 33.90848922729492, "learning_rate": 1.0500000000000001e-06, "loss": 2.7348, "step": 2693 }, { "epoch": 26.94, "grad_norm": 34.735530853271484, "learning_rate": 1.0466666666666669e-06, "loss": 2.8594, "step": 2694 }, { "epoch": 26.95, "grad_norm": 88.4500961303711, "learning_rate": 1.0433333333333334e-06, "loss": 3.0191, "step": 2695 }, { "epoch": 26.96, "grad_norm": 80.83636474609375, "learning_rate": 1.04e-06, "loss": 2.971, "step": 2696 }, { "epoch": 26.97, "grad_norm": 52.467323303222656, "learning_rate": 1.0366666666666668e-06, "loss": 2.8681, "step": 2697 }, { "epoch": 26.98, "grad_norm": 176.21771240234375, "learning_rate": 1.0333333333333333e-06, "loss": 3.1837, "step": 2698 }, { "epoch": 26.99, "grad_norm": 77.66706085205078, "learning_rate": 1.03e-06, "loss": 2.9402, "step": 2699 }, { "epoch": 27.0, "grad_norm": 99.78781127929688, "learning_rate": 1.0266666666666669e-06, "loss": 2.8506, "step": 2700 }, { "epoch": 27.0, "eval_loss": 2.677872896194458, "eval_map": 0.0057, "eval_map_50": 0.012, "eval_map_75": 0.0047, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0609, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0038, "eval_map_medium": 0.0084, "eval_map_neckline": 0.0113, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.125, "eval_map_shorts": 
0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.035, "eval_map_small": 0.0042, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0119, "eval_mar_10": 0.0302, "eval_mar_100": 0.0354, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.2653, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1746, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5463, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4635, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0419, "eval_mar_medium": 0.0487, "eval_mar_small": 0.0235, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.3789, "eval_samples_per_second": 5.441, "eval_steps_per_second": 1.36, "step": 2700 }, { "epoch": 27.01, "grad_norm": 67.66587829589844, "learning_rate": 1.0233333333333334e-06, "loss": 2.6708, "step": 2701 }, { "epoch": 27.02, "grad_norm": 44.05902862548828, "learning_rate": 
1.02e-06, "loss": 3.0974, "step": 2702 }, { "epoch": 27.03, "grad_norm": 88.94004821777344, "learning_rate": 1.0166666666666667e-06, "loss": 2.3218, "step": 2703 }, { "epoch": 27.04, "grad_norm": 41.528846740722656, "learning_rate": 1.0133333333333333e-06, "loss": 2.8027, "step": 2704 }, { "epoch": 27.05, "grad_norm": 39.66230392456055, "learning_rate": 1.01e-06, "loss": 3.2137, "step": 2705 }, { "epoch": 27.06, "grad_norm": 74.25580596923828, "learning_rate": 1.0066666666666668e-06, "loss": 2.1666, "step": 2706 }, { "epoch": 27.07, "grad_norm": 58.70429229736328, "learning_rate": 1.0033333333333334e-06, "loss": 3.0675, "step": 2707 }, { "epoch": 27.08, "grad_norm": 95.50252532958984, "learning_rate": 1.0000000000000002e-06, "loss": 2.9517, "step": 2708 }, { "epoch": 27.09, "grad_norm": 189.38104248046875, "learning_rate": 9.966666666666667e-07, "loss": 3.0159, "step": 2709 }, { "epoch": 27.1, "grad_norm": 38.341896057128906, "learning_rate": 9.933333333333333e-07, "loss": 2.2142, "step": 2710 }, { "epoch": 27.11, "grad_norm": 60.258731842041016, "learning_rate": 9.9e-07, "loss": 2.7633, "step": 2711 }, { "epoch": 27.12, "grad_norm": 300.357666015625, "learning_rate": 9.866666666666668e-07, "loss": 3.1031, "step": 2712 }, { "epoch": 27.13, "grad_norm": 91.83061981201172, "learning_rate": 9.833333333333334e-07, "loss": 3.2645, "step": 2713 }, { "epoch": 27.14, "grad_norm": 97.84458923339844, "learning_rate": 9.800000000000001e-07, "loss": 2.9603, "step": 2714 }, { "epoch": 27.15, "grad_norm": 93.63799285888672, "learning_rate": 9.766666666666667e-07, "loss": 3.5323, "step": 2715 }, { "epoch": 27.16, "grad_norm": 61.459693908691406, "learning_rate": 9.733333333333333e-07, "loss": 2.7152, "step": 2716 }, { "epoch": 27.17, "grad_norm": 131.77354431152344, "learning_rate": 9.7e-07, "loss": 2.4429, "step": 2717 }, { "epoch": 27.18, "grad_norm": 55.89741134643555, "learning_rate": 9.666666666666668e-07, "loss": 2.1568, "step": 2718 }, { "epoch": 27.19, "grad_norm": 
43.412986755371094, "learning_rate": 9.633333333333336e-07, "loss": 2.3859, "step": 2719 }, { "epoch": 27.2, "grad_norm": 83.1761474609375, "learning_rate": 9.600000000000001e-07, "loss": 2.8726, "step": 2720 }, { "epoch": 27.21, "grad_norm": 137.03903198242188, "learning_rate": 9.566666666666667e-07, "loss": 2.0436, "step": 2721 }, { "epoch": 27.22, "grad_norm": 37.14773941040039, "learning_rate": 9.533333333333335e-07, "loss": 3.4946, "step": 2722 }, { "epoch": 27.23, "grad_norm": 69.07929229736328, "learning_rate": 9.500000000000001e-07, "loss": 2.8749, "step": 2723 }, { "epoch": 27.24, "grad_norm": 267.46466064453125, "learning_rate": 9.466666666666667e-07, "loss": 2.4628, "step": 2724 }, { "epoch": 27.25, "grad_norm": 35.05078125, "learning_rate": 9.433333333333334e-07, "loss": 3.1078, "step": 2725 }, { "epoch": 27.26, "grad_norm": 206.06854248046875, "learning_rate": 9.400000000000001e-07, "loss": 2.9778, "step": 2726 }, { "epoch": 27.27, "grad_norm": 42.50644302368164, "learning_rate": 9.366666666666667e-07, "loss": 2.7689, "step": 2727 }, { "epoch": 27.28, "grad_norm": 229.8758087158203, "learning_rate": 9.333333333333334e-07, "loss": 2.1893, "step": 2728 }, { "epoch": 27.29, "grad_norm": 122.26912689208984, "learning_rate": 9.300000000000001e-07, "loss": 2.5909, "step": 2729 }, { "epoch": 27.3, "grad_norm": 44.86104965209961, "learning_rate": 9.266666666666667e-07, "loss": 2.9502, "step": 2730 }, { "epoch": 27.31, "grad_norm": 46.61646270751953, "learning_rate": 9.233333333333334e-07, "loss": 2.7516, "step": 2731 }, { "epoch": 27.32, "grad_norm": 106.95244598388672, "learning_rate": 9.200000000000001e-07, "loss": 2.7011, "step": 2732 }, { "epoch": 27.33, "grad_norm": 68.23123168945312, "learning_rate": 9.166666666666666e-07, "loss": 2.3392, "step": 2733 }, { "epoch": 27.34, "grad_norm": 79.66944885253906, "learning_rate": 9.133333333333334e-07, "loss": 2.618, "step": 2734 }, { "epoch": 27.35, "grad_norm": 59.00004577636719, "learning_rate": 
9.100000000000001e-07, "loss": 2.4625, "step": 2735 }, { "epoch": 27.36, "grad_norm": 88.73501586914062, "learning_rate": 9.066666666666668e-07, "loss": 2.7427, "step": 2736 }, { "epoch": 27.37, "grad_norm": 25.694883346557617, "learning_rate": 9.033333333333334e-07, "loss": 2.1707, "step": 2737 }, { "epoch": 27.38, "grad_norm": 169.7908935546875, "learning_rate": 9.000000000000001e-07, "loss": 2.2267, "step": 2738 }, { "epoch": 27.39, "grad_norm": 52.57016372680664, "learning_rate": 8.966666666666668e-07, "loss": 3.0791, "step": 2739 }, { "epoch": 27.4, "grad_norm": 39.02707290649414, "learning_rate": 8.933333333333334e-07, "loss": 2.5732, "step": 2740 }, { "epoch": 27.41, "grad_norm": 73.12456512451172, "learning_rate": 8.900000000000001e-07, "loss": 2.6277, "step": 2741 }, { "epoch": 27.42, "grad_norm": 74.69763946533203, "learning_rate": 8.866666666666668e-07, "loss": 2.5858, "step": 2742 }, { "epoch": 27.43, "grad_norm": 120.30226135253906, "learning_rate": 8.833333333333334e-07, "loss": 2.5787, "step": 2743 }, { "epoch": 27.44, "grad_norm": 69.66427612304688, "learning_rate": 8.8e-07, "loss": 2.777, "step": 2744 }, { "epoch": 27.45, "grad_norm": 114.405517578125, "learning_rate": 8.766666666666668e-07, "loss": 2.6318, "step": 2745 }, { "epoch": 27.46, "grad_norm": 48.72314453125, "learning_rate": 8.733333333333334e-07, "loss": 2.4839, "step": 2746 }, { "epoch": 27.47, "grad_norm": 26.610631942749023, "learning_rate": 8.7e-07, "loss": 3.2229, "step": 2747 }, { "epoch": 27.48, "grad_norm": 52.709529876708984, "learning_rate": 8.666666666666668e-07, "loss": 2.6376, "step": 2748 }, { "epoch": 27.49, "grad_norm": 53.4420280456543, "learning_rate": 8.633333333333334e-07, "loss": 2.8276, "step": 2749 }, { "epoch": 27.5, "grad_norm": 86.6237564086914, "learning_rate": 8.6e-07, "loss": 2.53, "step": 2750 }, { "epoch": 27.5, "eval_loss": 2.6831274032592773, "eval_map": 0.0057, "eval_map_50": 0.0128, "eval_map_75": 0.0047, "eval_map_applique": 0.0, "eval_map_bag, 
wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0606, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0036, "eval_map_medium": 0.0085, "eval_map_neckline": 0.0108, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1262, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0359, "eval_map_small": 0.0043, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.012, "eval_mar_10": 0.0304, "eval_mar_100": 0.0356, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.2776, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1762, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, 
blouse": 0.0, "eval_mar_100_shoe": 0.5455, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4609, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0433, "eval_mar_medium": 0.0492, "eval_mar_small": 0.0234, "eval_model_preparation_time": 0.0124, "eval_runtime": 19.0472, "eval_samples_per_second": 5.25, "eval_steps_per_second": 1.313, "step": 2750 }, { "epoch": 27.51, "grad_norm": 111.37831115722656, "learning_rate": 8.566666666666668e-07, "loss": 2.3119, "step": 2751 }, { "epoch": 27.52, "grad_norm": 487.41802978515625, "learning_rate": 8.533333333333334e-07, "loss": 2.2269, "step": 2752 }, { "epoch": 27.53, "grad_norm": 789.7392578125, "learning_rate": 8.500000000000001e-07, "loss": 2.588, "step": 2753 }, { "epoch": 27.54, "grad_norm": 90.5634994506836, "learning_rate": 8.466666666666668e-07, "loss": 2.4305, "step": 2754 }, { "epoch": 27.55, "grad_norm": 79.99908447265625, "learning_rate": 8.433333333333333e-07, "loss": 2.7209, "step": 2755 }, { "epoch": 27.56, "grad_norm": 47.07384490966797, "learning_rate": 8.400000000000001e-07, "loss": 2.3966, "step": 2756 }, { "epoch": 27.57, "grad_norm": 47.61046600341797, "learning_rate": 8.366666666666668e-07, "loss": 2.7018, "step": 2757 }, { "epoch": 27.58, "grad_norm": 45.77945327758789, "learning_rate": 8.333333333333333e-07, "loss": 2.9009, "step": 2758 }, { "epoch": 27.59, "grad_norm": 59.590152740478516, "learning_rate": 8.300000000000001e-07, "loss": 2.1356, "step": 2759 }, { "epoch": 27.6, "grad_norm": 32.81962966918945, "learning_rate": 8.266666666666668e-07, "loss": 2.6864, "step": 2760 }, { "epoch": 27.61, "grad_norm": 80.83741760253906, "learning_rate": 8.233333333333333e-07, "loss": 3.6851, "step": 2761 }, { "epoch": 27.62, "grad_norm": 136.29425048828125, 
"learning_rate": 8.200000000000001e-07, "loss": 2.5882, "step": 2762 }, { "epoch": 27.63, "grad_norm": 55.19911193847656, "learning_rate": 8.166666666666668e-07, "loss": 3.7601, "step": 2763 }, { "epoch": 27.64, "grad_norm": 470.7610778808594, "learning_rate": 8.133333333333333e-07, "loss": 1.7693, "step": 2764 }, { "epoch": 27.65, "grad_norm": 211.98252868652344, "learning_rate": 8.100000000000001e-07, "loss": 2.9872, "step": 2765 }, { "epoch": 27.66, "grad_norm": 54.39892578125, "learning_rate": 8.066666666666667e-07, "loss": 3.0879, "step": 2766 }, { "epoch": 27.67, "grad_norm": 442.80023193359375, "learning_rate": 8.033333333333335e-07, "loss": 2.4491, "step": 2767 }, { "epoch": 27.68, "grad_norm": 65.4479751586914, "learning_rate": 8.000000000000001e-07, "loss": 2.5762, "step": 2768 }, { "epoch": 27.69, "grad_norm": 39.42523956298828, "learning_rate": 7.966666666666667e-07, "loss": 2.9632, "step": 2769 }, { "epoch": 27.7, "grad_norm": 61.836246490478516, "learning_rate": 7.933333333333335e-07, "loss": 2.662, "step": 2770 }, { "epoch": 27.71, "grad_norm": 108.53718566894531, "learning_rate": 7.900000000000001e-07, "loss": 2.6621, "step": 2771 }, { "epoch": 27.72, "grad_norm": 34.24237060546875, "learning_rate": 7.866666666666667e-07, "loss": 3.1394, "step": 2772 }, { "epoch": 27.73, "grad_norm": 55.0642204284668, "learning_rate": 7.833333333333335e-07, "loss": 3.0449, "step": 2773 }, { "epoch": 27.74, "grad_norm": 79.21582794189453, "learning_rate": 7.8e-07, "loss": 2.5143, "step": 2774 }, { "epoch": 27.75, "grad_norm": 37.73117446899414, "learning_rate": 7.766666666666667e-07, "loss": 3.2587, "step": 2775 }, { "epoch": 27.76, "grad_norm": 31.058277130126953, "learning_rate": 7.733333333333335e-07, "loss": 2.9056, "step": 2776 }, { "epoch": 27.77, "grad_norm": 75.52549743652344, "learning_rate": 7.7e-07, "loss": 3.2427, "step": 2777 }, { "epoch": 27.78, "grad_norm": 34.74456024169922, "learning_rate": 7.666666666666667e-07, "loss": 3.6181, "step": 2778 }, { 
"epoch": 27.79, "grad_norm": 53.857879638671875, "learning_rate": 7.633333333333335e-07, "loss": 5.8228, "step": 2779 }, { "epoch": 27.8, "grad_norm": 240.91030883789062, "learning_rate": 7.6e-07, "loss": 3.3124, "step": 2780 }, { "epoch": 27.81, "grad_norm": 31.156335830688477, "learning_rate": 7.566666666666667e-07, "loss": 2.066, "step": 2781 }, { "epoch": 27.82, "grad_norm": 39.01289749145508, "learning_rate": 7.533333333333335e-07, "loss": 2.9959, "step": 2782 }, { "epoch": 27.83, "grad_norm": 40.66279220581055, "learning_rate": 7.5e-07, "loss": 3.1049, "step": 2783 }, { "epoch": 27.84, "grad_norm": 74.48126220703125, "learning_rate": 7.466666666666668e-07, "loss": 2.7676, "step": 2784 }, { "epoch": 27.85, "grad_norm": 55.55577850341797, "learning_rate": 7.433333333333335e-07, "loss": 2.2096, "step": 2785 }, { "epoch": 27.86, "grad_norm": 22.80941390991211, "learning_rate": 7.4e-07, "loss": 5.8771, "step": 2786 }, { "epoch": 27.87, "grad_norm": 71.47132873535156, "learning_rate": 7.366666666666668e-07, "loss": 3.2406, "step": 2787 }, { "epoch": 27.88, "grad_norm": 53.866310119628906, "learning_rate": 7.333333333333334e-07, "loss": 2.848, "step": 2788 }, { "epoch": 27.89, "grad_norm": 142.66522216796875, "learning_rate": 7.3e-07, "loss": 2.312, "step": 2789 }, { "epoch": 27.9, "grad_norm": 54.88626480102539, "learning_rate": 7.266666666666668e-07, "loss": 2.2469, "step": 2790 }, { "epoch": 27.91, "grad_norm": 56.256752014160156, "learning_rate": 7.233333333333334e-07, "loss": 3.1465, "step": 2791 }, { "epoch": 27.92, "grad_norm": 29.77903175354004, "learning_rate": 7.2e-07, "loss": 2.4471, "step": 2792 }, { "epoch": 27.93, "grad_norm": 46.13420486450195, "learning_rate": 7.166666666666668e-07, "loss": 2.5807, "step": 2793 }, { "epoch": 27.94, "grad_norm": 87.96176147460938, "learning_rate": 7.133333333333334e-07, "loss": 3.3243, "step": 2794 }, { "epoch": 27.95, "grad_norm": 23.70219612121582, "learning_rate": 7.1e-07, "loss": 3.529, "step": 2795 }, { "epoch": 
27.96, "grad_norm": 68.40198516845703, "learning_rate": 7.066666666666667e-07, "loss": 3.1082, "step": 2796 }, { "epoch": 27.97, "grad_norm": 50.869937896728516, "learning_rate": 7.033333333333334e-07, "loss": 3.8249, "step": 2797 }, { "epoch": 27.98, "grad_norm": 41.98263931274414, "learning_rate": 7.000000000000001e-07, "loss": 3.1692, "step": 2798 }, { "epoch": 27.99, "grad_norm": 83.49356842041016, "learning_rate": 6.966666666666667e-07, "loss": 2.5772, "step": 2799 }, { "epoch": 28.0, "grad_norm": 76.60325622558594, "learning_rate": 6.933333333333334e-07, "loss": 2.1641, "step": 2800 }, { "epoch": 28.0, "eval_loss": 2.67155385017395, "eval_map": 0.006, "eval_map_50": 0.013, "eval_map_75": 0.0048, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0718, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0052, "eval_map_medium": 0.0085, "eval_map_neckline": 0.0111, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1261, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.036, "eval_map_small": 0.0042, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0127, "eval_mar_10": 0.0309, "eval_mar_100": 0.0362, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, 
"eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.2939, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1746, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5463, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4704, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0445, "eval_mar_medium": 0.0485, "eval_mar_small": 0.0243, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.8861, "eval_samples_per_second": 5.295, "eval_steps_per_second": 1.324, "step": 2800 }, { "epoch": 28.01, "grad_norm": 703.5465698242188, "learning_rate": 6.900000000000001e-07, "loss": 3.5018, "step": 2801 }, { "epoch": 28.02, "grad_norm": 57.31896209716797, "learning_rate": 6.866666666666667e-07, "loss": 3.5015, "step": 2802 }, { "epoch": 28.03, "grad_norm": 143.8687744140625, "learning_rate": 6.833333333333334e-07, "loss": 2.2377, "step": 2803 }, { "epoch": 28.04, "grad_norm": 31.846202850341797, "learning_rate": 6.800000000000001e-07, "loss": 5.8854, "step": 2804 }, { "epoch": 28.05, "grad_norm": 52.451942443847656, "learning_rate": 6.766666666666667e-07, "loss": 2.7661, "step": 2805 }, { "epoch": 28.06, "grad_norm": 
52.90067672729492, "learning_rate": 6.733333333333334e-07, "loss": 3.0535, "step": 2806 }, { "epoch": 28.07, "grad_norm": 79.01045989990234, "learning_rate": 6.7e-07, "loss": 3.042, "step": 2807 }, { "epoch": 28.08, "grad_norm": 162.77554321289062, "learning_rate": 6.666666666666667e-07, "loss": 2.4978, "step": 2808 }, { "epoch": 28.09, "grad_norm": 112.60151672363281, "learning_rate": 6.633333333333334e-07, "loss": 3.0093, "step": 2809 }, { "epoch": 28.1, "grad_norm": 45.38791275024414, "learning_rate": 6.6e-07, "loss": 2.7523, "step": 2810 }, { "epoch": 28.11, "grad_norm": 99.67398834228516, "learning_rate": 6.566666666666667e-07, "loss": 2.3202, "step": 2811 }, { "epoch": 28.12, "grad_norm": 70.28121185302734, "learning_rate": 6.533333333333334e-07, "loss": 3.1577, "step": 2812 }, { "epoch": 28.13, "grad_norm": 151.2437286376953, "learning_rate": 6.5e-07, "loss": 2.8597, "step": 2813 }, { "epoch": 28.14, "grad_norm": 77.7671890258789, "learning_rate": 6.466666666666667e-07, "loss": 2.5207, "step": 2814 }, { "epoch": 28.15, "grad_norm": 33.797401428222656, "learning_rate": 6.433333333333335e-07, "loss": 2.0657, "step": 2815 }, { "epoch": 28.16, "grad_norm": 66.44978332519531, "learning_rate": 6.4e-07, "loss": 2.8132, "step": 2816 }, { "epoch": 28.17, "grad_norm": 76.13809204101562, "learning_rate": 6.366666666666667e-07, "loss": 1.9503, "step": 2817 }, { "epoch": 28.18, "grad_norm": 104.13150024414062, "learning_rate": 6.333333333333334e-07, "loss": 2.8289, "step": 2818 }, { "epoch": 28.19, "grad_norm": 112.77628326416016, "learning_rate": 6.3e-07, "loss": 3.312, "step": 2819 }, { "epoch": 28.2, "grad_norm": 49.7643928527832, "learning_rate": 6.266666666666667e-07, "loss": 2.5565, "step": 2820 }, { "epoch": 28.21, "grad_norm": 92.68950653076172, "learning_rate": 6.233333333333333e-07, "loss": 2.9278, "step": 2821 }, { "epoch": 28.22, "grad_norm": 57.06440734863281, "learning_rate": 6.200000000000001e-07, "loss": 4.1695, "step": 2822 }, { "epoch": 28.23, 
"grad_norm": 62.0416374206543, "learning_rate": 6.166666666666668e-07, "loss": 3.0769, "step": 2823 }, { "epoch": 28.24, "grad_norm": 215.5903778076172, "learning_rate": 6.133333333333333e-07, "loss": 3.0688, "step": 2824 }, { "epoch": 28.25, "grad_norm": 76.24280548095703, "learning_rate": 6.100000000000001e-07, "loss": 2.6063, "step": 2825 }, { "epoch": 28.26, "grad_norm": 50.79069519042969, "learning_rate": 6.066666666666668e-07, "loss": 2.9215, "step": 2826 }, { "epoch": 28.27, "grad_norm": 33.20566940307617, "learning_rate": 6.033333333333334e-07, "loss": 2.9948, "step": 2827 }, { "epoch": 28.28, "grad_norm": 55.52151870727539, "learning_rate": 6.000000000000001e-07, "loss": 3.312, "step": 2828 }, { "epoch": 28.29, "grad_norm": 94.1748046875, "learning_rate": 5.966666666666667e-07, "loss": 2.6312, "step": 2829 }, { "epoch": 28.3, "grad_norm": 32.579307556152344, "learning_rate": 5.933333333333334e-07, "loss": 2.7938, "step": 2830 }, { "epoch": 28.31, "grad_norm": 71.63134002685547, "learning_rate": 5.900000000000001e-07, "loss": 2.4834, "step": 2831 }, { "epoch": 28.32, "grad_norm": 79.55813598632812, "learning_rate": 5.866666666666667e-07, "loss": 2.3269, "step": 2832 }, { "epoch": 28.33, "grad_norm": 67.79906463623047, "learning_rate": 5.833333333333334e-07, "loss": 2.6859, "step": 2833 }, { "epoch": 28.34, "grad_norm": 88.43719482421875, "learning_rate": 5.800000000000001e-07, "loss": 2.5855, "step": 2834 }, { "epoch": 28.35, "grad_norm": 36.95869827270508, "learning_rate": 5.766666666666667e-07, "loss": 3.1869, "step": 2835 }, { "epoch": 28.36, "grad_norm": 39.42185592651367, "learning_rate": 5.733333333333334e-07, "loss": 2.2976, "step": 2836 }, { "epoch": 28.37, "grad_norm": 92.34165954589844, "learning_rate": 5.7e-07, "loss": 2.7041, "step": 2837 }, { "epoch": 28.38, "grad_norm": 66.5816421508789, "learning_rate": 5.666666666666667e-07, "loss": 2.1679, "step": 2838 }, { "epoch": 28.39, "grad_norm": 92.2110824584961, "learning_rate": 
5.633333333333334e-07, "loss": 3.4249, "step": 2839 }, { "epoch": 28.4, "grad_norm": 44.26503372192383, "learning_rate": 5.6e-07, "loss": 2.4772, "step": 2840 }, { "epoch": 28.41, "grad_norm": 34.916900634765625, "learning_rate": 5.566666666666667e-07, "loss": 2.8047, "step": 2841 }, { "epoch": 28.42, "grad_norm": 37.03097915649414, "learning_rate": 5.533333333333334e-07, "loss": 2.3042, "step": 2842 }, { "epoch": 28.43, "grad_norm": 80.77098846435547, "learning_rate": 5.5e-07, "loss": 3.0084, "step": 2843 }, { "epoch": 28.44, "grad_norm": 34.881690979003906, "learning_rate": 5.466666666666667e-07, "loss": 2.9363, "step": 2844 }, { "epoch": 28.45, "grad_norm": 44.09884262084961, "learning_rate": 5.433333333333334e-07, "loss": 2.845, "step": 2845 }, { "epoch": 28.46, "grad_norm": 96.36137390136719, "learning_rate": 5.4e-07, "loss": 2.9201, "step": 2846 }, { "epoch": 28.47, "grad_norm": 147.99560546875, "learning_rate": 5.366666666666667e-07, "loss": 2.5482, "step": 2847 }, { "epoch": 28.48, "grad_norm": 140.03883361816406, "learning_rate": 5.333333333333335e-07, "loss": 2.4812, "step": 2848 }, { "epoch": 28.49, "grad_norm": 35.24855041503906, "learning_rate": 5.3e-07, "loss": 2.2848, "step": 2849 }, { "epoch": 28.5, "grad_norm": 33.960235595703125, "learning_rate": 5.266666666666667e-07, "loss": 5.9674, "step": 2850 }, { "epoch": 28.5, "eval_loss": 2.6713321208953857, "eval_map": 0.0058, "eval_map_50": 0.0127, "eval_map_75": 0.0045, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0646, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0045, 
"eval_map_medium": 0.0079, "eval_map_neckline": 0.0106, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1274, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0346, "eval_map_small": 0.0044, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0125, "eval_mar_10": 0.0305, "eval_mar_100": 0.036, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.2857, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1762, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5507, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4643, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0445, "eval_mar_medium": 0.0477, "eval_mar_small": 0.0251, "eval_model_preparation_time": 0.0124, 
"eval_runtime": 19.9244, "eval_samples_per_second": 5.019, "eval_steps_per_second": 1.255, "step": 2850 }, { "epoch": 28.51, "grad_norm": 57.42973327636719, "learning_rate": 5.233333333333334e-07, "loss": 3.1027, "step": 2851 }, { "epoch": 28.52, "grad_norm": 65.59241485595703, "learning_rate": 5.2e-07, "loss": 2.8937, "step": 2852 }, { "epoch": 28.53, "grad_norm": 67.73577117919922, "learning_rate": 5.166666666666667e-07, "loss": 2.4648, "step": 2853 }, { "epoch": 28.54, "grad_norm": 42.9696159362793, "learning_rate": 5.133333333333334e-07, "loss": 3.0022, "step": 2854 }, { "epoch": 28.55, "grad_norm": 49.722129821777344, "learning_rate": 5.1e-07, "loss": 2.4037, "step": 2855 }, { "epoch": 28.56, "grad_norm": 93.41357421875, "learning_rate": 5.066666666666667e-07, "loss": 2.1252, "step": 2856 }, { "epoch": 28.57, "grad_norm": 99.08150482177734, "learning_rate": 5.033333333333334e-07, "loss": 3.0668, "step": 2857 }, { "epoch": 28.58, "grad_norm": 89.79957580566406, "learning_rate": 5.000000000000001e-07, "loss": 2.9683, "step": 2858 }, { "epoch": 28.59, "grad_norm": 68.17562103271484, "learning_rate": 4.966666666666666e-07, "loss": 2.4703, "step": 2859 }, { "epoch": 28.6, "grad_norm": 76.32723999023438, "learning_rate": 4.933333333333334e-07, "loss": 2.9934, "step": 2860 }, { "epoch": 28.61, "grad_norm": 36.56053161621094, "learning_rate": 4.900000000000001e-07, "loss": 2.293, "step": 2861 }, { "epoch": 28.62, "grad_norm": 115.06880187988281, "learning_rate": 4.866666666666666e-07, "loss": 2.8767, "step": 2862 }, { "epoch": 28.63, "grad_norm": 45.23963928222656, "learning_rate": 4.833333333333334e-07, "loss": 2.6036, "step": 2863 }, { "epoch": 28.64, "grad_norm": 58.0495491027832, "learning_rate": 4.800000000000001e-07, "loss": 2.7501, "step": 2864 }, { "epoch": 28.65, "grad_norm": 35.53668212890625, "learning_rate": 4.766666666666667e-07, "loss": 2.6529, "step": 2865 }, { "epoch": 28.66, "grad_norm": 39.215614318847656, "learning_rate": 4.7333333333333334e-07, 
"loss": 1.9884, "step": 2866 }, { "epoch": 28.67, "grad_norm": 310.9671630859375, "learning_rate": 4.7000000000000005e-07, "loss": 3.2951, "step": 2867 }, { "epoch": 28.68, "grad_norm": 54.53158950805664, "learning_rate": 4.666666666666667e-07, "loss": 2.6883, "step": 2868 }, { "epoch": 28.69, "grad_norm": 53.97788619995117, "learning_rate": 4.6333333333333333e-07, "loss": 2.8647, "step": 2869 }, { "epoch": 28.7, "grad_norm": 40.23823928833008, "learning_rate": 4.6000000000000004e-07, "loss": 2.285, "step": 2870 }, { "epoch": 28.71, "grad_norm": 68.31053924560547, "learning_rate": 4.566666666666667e-07, "loss": 2.181, "step": 2871 }, { "epoch": 28.72, "grad_norm": 55.363555908203125, "learning_rate": 4.533333333333334e-07, "loss": 2.4979, "step": 2872 }, { "epoch": 28.73, "grad_norm": 49.4195671081543, "learning_rate": 4.5000000000000003e-07, "loss": 2.6569, "step": 2873 }, { "epoch": 28.74, "grad_norm": 63.68266296386719, "learning_rate": 4.466666666666667e-07, "loss": 2.6712, "step": 2874 }, { "epoch": 28.75, "grad_norm": 73.00455474853516, "learning_rate": 4.433333333333334e-07, "loss": 2.5029, "step": 2875 }, { "epoch": 28.76, "grad_norm": 112.10633087158203, "learning_rate": 4.4e-07, "loss": 3.073, "step": 2876 }, { "epoch": 28.77, "grad_norm": 40.42978286743164, "learning_rate": 4.366666666666667e-07, "loss": 3.2207, "step": 2877 }, { "epoch": 28.78, "grad_norm": 47.864280700683594, "learning_rate": 4.333333333333334e-07, "loss": 2.395, "step": 2878 }, { "epoch": 28.79, "grad_norm": 48.60551834106445, "learning_rate": 4.3e-07, "loss": 2.7338, "step": 2879 }, { "epoch": 28.8, "grad_norm": 70.87472534179688, "learning_rate": 4.266666666666667e-07, "loss": 2.5601, "step": 2880 }, { "epoch": 28.81, "grad_norm": 69.79900360107422, "learning_rate": 4.233333333333334e-07, "loss": 2.8714, "step": 2881 }, { "epoch": 28.82, "grad_norm": 46.965248107910156, "learning_rate": 4.2000000000000006e-07, "loss": 2.95, "step": 2882 }, { "epoch": 28.83, "grad_norm": 
57.52566909790039, "learning_rate": 4.1666666666666667e-07, "loss": 2.9573, "step": 2883 }, { "epoch": 28.84, "grad_norm": 633.8948974609375, "learning_rate": 4.133333333333334e-07, "loss": 3.0271, "step": 2884 }, { "epoch": 28.85, "grad_norm": 50.66726303100586, "learning_rate": 4.1000000000000004e-07, "loss": 3.5294, "step": 2885 }, { "epoch": 28.86, "grad_norm": 56.96826171875, "learning_rate": 4.0666666666666666e-07, "loss": 3.099, "step": 2886 }, { "epoch": 28.87, "grad_norm": 75.68335723876953, "learning_rate": 4.0333333333333337e-07, "loss": 2.8796, "step": 2887 }, { "epoch": 28.88, "grad_norm": 103.58356475830078, "learning_rate": 4.0000000000000003e-07, "loss": 2.0922, "step": 2888 }, { "epoch": 28.89, "grad_norm": 85.78108215332031, "learning_rate": 3.9666666666666675e-07, "loss": 2.8556, "step": 2889 }, { "epoch": 28.9, "grad_norm": 43.504302978515625, "learning_rate": 3.9333333333333336e-07, "loss": 2.7239, "step": 2890 }, { "epoch": 28.91, "grad_norm": 95.02520751953125, "learning_rate": 3.9e-07, "loss": 2.3034, "step": 2891 }, { "epoch": 28.92, "grad_norm": 52.77136993408203, "learning_rate": 3.8666666666666674e-07, "loss": 3.4304, "step": 2892 }, { "epoch": 28.93, "grad_norm": 39.720455169677734, "learning_rate": 3.8333333333333335e-07, "loss": 2.519, "step": 2893 }, { "epoch": 28.94, "grad_norm": 170.6627197265625, "learning_rate": 3.8e-07, "loss": 2.8422, "step": 2894 }, { "epoch": 28.95, "grad_norm": 46.561641693115234, "learning_rate": 3.7666666666666673e-07, "loss": 2.4662, "step": 2895 }, { "epoch": 28.96, "grad_norm": 40.278202056884766, "learning_rate": 3.733333333333334e-07, "loss": 2.8326, "step": 2896 }, { "epoch": 28.97, "grad_norm": 92.15301513671875, "learning_rate": 3.7e-07, "loss": 2.9868, "step": 2897 }, { "epoch": 28.98, "grad_norm": 45.54911422729492, "learning_rate": 3.666666666666667e-07, "loss": 2.7924, "step": 2898 }, { "epoch": 28.99, "grad_norm": 40.84957504272461, "learning_rate": 3.633333333333334e-07, "loss": 2.7613, 
"step": 2899 }, { "epoch": 29.0, "grad_norm": 89.11406707763672, "learning_rate": 3.6e-07, "loss": 2.6268, "step": 2900 }, { "epoch": 29.0, "eval_loss": 2.6703903675079346, "eval_map": 0.0056, "eval_map_50": 0.0124, "eval_map_75": 0.0045, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0621, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0043, "eval_map_medium": 0.0079, "eval_map_neckline": 0.0104, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1227, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.035, "eval_map_small": 0.0041, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0128, "eval_mar_10": 0.0302, "eval_mar_100": 0.0356, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.2878, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, 
"eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1746, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5366, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4609, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.045, "eval_mar_medium": 0.0474, "eval_mar_small": 0.0238, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.7474, "eval_samples_per_second": 5.334, "eval_steps_per_second": 1.334, "step": 2900 }, { "epoch": 29.01, "grad_norm": 63.679866790771484, "learning_rate": 3.566666666666667e-07, "loss": 2.2009, "step": 2901 }, { "epoch": 29.02, "grad_norm": 79.6557388305664, "learning_rate": 3.533333333333334e-07, "loss": 4.0248, "step": 2902 }, { "epoch": 29.03, "grad_norm": 68.6346206665039, "learning_rate": 3.5000000000000004e-07, "loss": 2.5729, "step": 2903 }, { "epoch": 29.04, "grad_norm": 77.33415222167969, "learning_rate": 3.466666666666667e-07, "loss": 2.7965, "step": 2904 }, { "epoch": 29.05, "grad_norm": 43.091766357421875, "learning_rate": 3.4333333333333336e-07, "loss": 2.2556, "step": 2905 }, { "epoch": 29.06, "grad_norm": 86.25959777832031, "learning_rate": 3.4000000000000003e-07, "loss": 2.5336, "step": 2906 }, { "epoch": 29.07, "grad_norm": 37.40142822265625, "learning_rate": 3.366666666666667e-07, "loss": 2.6323, "step": 2907 }, { "epoch": 29.08, "grad_norm": 52.46685028076172, "learning_rate": 3.3333333333333335e-07, "loss": 2.7957, "step": 2908 }, { "epoch": 29.09, "grad_norm": 94.168212890625, "learning_rate": 3.3e-07, "loss": 3.1648, "step": 2909 }, { 
"epoch": 29.1, "grad_norm": 36.033348083496094, "learning_rate": 3.266666666666667e-07, "loss": 3.2881, "step": 2910 }, { "epoch": 29.11, "grad_norm": 105.79997253417969, "learning_rate": 3.2333333333333334e-07, "loss": 2.7934, "step": 2911 }, { "epoch": 29.12, "grad_norm": 51.0577507019043, "learning_rate": 3.2e-07, "loss": 3.0298, "step": 2912 }, { "epoch": 29.13, "grad_norm": 58.35346603393555, "learning_rate": 3.166666666666667e-07, "loss": 2.4864, "step": 2913 }, { "epoch": 29.14, "grad_norm": 50.889041900634766, "learning_rate": 3.1333333333333333e-07, "loss": 2.9314, "step": 2914 }, { "epoch": 29.15, "grad_norm": 100.68331909179688, "learning_rate": 3.1000000000000005e-07, "loss": 3.5478, "step": 2915 }, { "epoch": 29.16, "grad_norm": 50.592552185058594, "learning_rate": 3.0666666666666666e-07, "loss": 2.1399, "step": 2916 }, { "epoch": 29.17, "grad_norm": 47.50069046020508, "learning_rate": 3.033333333333334e-07, "loss": 2.718, "step": 2917 }, { "epoch": 29.18, "grad_norm": 56.3369140625, "learning_rate": 3.0000000000000004e-07, "loss": 2.8648, "step": 2918 }, { "epoch": 29.19, "grad_norm": 112.496826171875, "learning_rate": 2.966666666666667e-07, "loss": 2.7769, "step": 2919 }, { "epoch": 29.2, "grad_norm": 61.31437301635742, "learning_rate": 2.9333333333333337e-07, "loss": 2.2139, "step": 2920 }, { "epoch": 29.21, "grad_norm": 52.292396545410156, "learning_rate": 2.9000000000000003e-07, "loss": 5.8192, "step": 2921 }, { "epoch": 29.22, "grad_norm": 66.11973571777344, "learning_rate": 2.866666666666667e-07, "loss": 3.4142, "step": 2922 }, { "epoch": 29.23, "grad_norm": 65.9860610961914, "learning_rate": 2.8333333333333336e-07, "loss": 2.0627, "step": 2923 }, { "epoch": 29.24, "grad_norm": 44.03090286254883, "learning_rate": 2.8e-07, "loss": 2.4837, "step": 2924 }, { "epoch": 29.25, "grad_norm": 49.34619140625, "learning_rate": 2.766666666666667e-07, "loss": 2.6588, "step": 2925 }, { "epoch": 29.26, "grad_norm": 52.448604583740234, "learning_rate": 
2.7333333333333335e-07, "loss": 3.1632, "step": 2926 }, { "epoch": 29.27, "grad_norm": 40.613197326660156, "learning_rate": 2.7e-07, "loss": 2.9188, "step": 2927 }, { "epoch": 29.28, "grad_norm": 74.04124450683594, "learning_rate": 2.666666666666667e-07, "loss": 2.261, "step": 2928 }, { "epoch": 29.29, "grad_norm": 68.19158935546875, "learning_rate": 2.6333333333333334e-07, "loss": 2.5431, "step": 2929 }, { "epoch": 29.3, "grad_norm": 76.5438461303711, "learning_rate": 2.6e-07, "loss": 3.2394, "step": 2930 }, { "epoch": 29.31, "grad_norm": 62.056976318359375, "learning_rate": 2.566666666666667e-07, "loss": 2.5313, "step": 2931 }, { "epoch": 29.32, "grad_norm": 48.0072021484375, "learning_rate": 2.533333333333333e-07, "loss": 2.4646, "step": 2932 }, { "epoch": 29.33, "grad_norm": 45.47304153442383, "learning_rate": 2.5000000000000004e-07, "loss": 2.9432, "step": 2933 }, { "epoch": 29.34, "grad_norm": 38.215492248535156, "learning_rate": 2.466666666666667e-07, "loss": 2.6582, "step": 2934 }, { "epoch": 29.35, "grad_norm": 51.851890563964844, "learning_rate": 2.433333333333333e-07, "loss": 3.5335, "step": 2935 }, { "epoch": 29.36, "grad_norm": 36.866294860839844, "learning_rate": 2.4000000000000003e-07, "loss": 3.0419, "step": 2936 }, { "epoch": 29.37, "grad_norm": 55.38001251220703, "learning_rate": 2.3666666666666667e-07, "loss": 2.4941, "step": 2937 }, { "epoch": 29.38, "grad_norm": 101.049560546875, "learning_rate": 2.3333333333333336e-07, "loss": 2.2199, "step": 2938 }, { "epoch": 29.39, "grad_norm": 44.236454010009766, "learning_rate": 2.3000000000000002e-07, "loss": 3.1195, "step": 2939 }, { "epoch": 29.4, "grad_norm": 100.13313293457031, "learning_rate": 2.266666666666667e-07, "loss": 3.1643, "step": 2940 }, { "epoch": 29.41, "grad_norm": 61.34566879272461, "learning_rate": 2.2333333333333335e-07, "loss": 3.1913, "step": 2941 }, { "epoch": 29.42, "grad_norm": 24.59527587890625, "learning_rate": 2.2e-07, "loss": 2.8837, "step": 2942 }, { "epoch": 29.43, 
"grad_norm": 82.82228088378906, "learning_rate": 2.166666666666667e-07, "loss": 3.1596, "step": 2943 }, { "epoch": 29.44, "grad_norm": 48.02663803100586, "learning_rate": 2.1333333333333334e-07, "loss": 2.2863, "step": 2944 }, { "epoch": 29.45, "grad_norm": 141.7073516845703, "learning_rate": 2.1000000000000003e-07, "loss": 2.2358, "step": 2945 }, { "epoch": 29.46, "grad_norm": 42.53813934326172, "learning_rate": 2.066666666666667e-07, "loss": 2.7284, "step": 2946 }, { "epoch": 29.47, "grad_norm": 92.86538696289062, "learning_rate": 2.0333333333333333e-07, "loss": 2.8727, "step": 2947 }, { "epoch": 29.48, "grad_norm": 40.750492095947266, "learning_rate": 2.0000000000000002e-07, "loss": 2.4909, "step": 2948 }, { "epoch": 29.49, "grad_norm": 57.90232849121094, "learning_rate": 1.9666666666666668e-07, "loss": 2.6017, "step": 2949 }, { "epoch": 29.5, "grad_norm": 80.1380615234375, "learning_rate": 1.9333333333333337e-07, "loss": 2.5974, "step": 2950 }, { "epoch": 29.5, "eval_loss": 2.6705124378204346, "eval_map": 0.0057, "eval_map_50": 0.0124, "eval_map_75": 0.0046, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, "eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0651, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0054, "eval_map_medium": 0.0081, "eval_map_neckline": 0.0106, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1224, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0362, "eval_map_small": 0.0041, 
"eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0129, "eval_mar_10": 0.0309, "eval_mar_100": 0.0362, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.2939, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1889, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5366, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, "eval_mar_100_sleeve": 0.4661, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0449, "eval_mar_medium": 0.0494, "eval_mar_small": 0.0238, "eval_model_preparation_time": 0.0124, "eval_runtime": 19.6346, "eval_samples_per_second": 5.093, "eval_steps_per_second": 1.273, "step": 2950 }, { "epoch": 29.51, "grad_norm": 45.12267303466797, "learning_rate": 1.9e-07, "loss": 2.746, "step": 2951 }, { "epoch": 29.52, "grad_norm": 80.55419158935547, "learning_rate": 1.866666666666667e-07, "loss": 2.2481, "step": 2952 }, { "epoch": 29.53, "grad_norm": 
75.60895538330078, "learning_rate": 1.8333333333333336e-07, "loss": 5.6898, "step": 2953 }, { "epoch": 29.54, "grad_norm": 113.02906799316406, "learning_rate": 1.8e-07, "loss": 3.0503, "step": 2954 }, { "epoch": 29.55, "grad_norm": 60.408294677734375, "learning_rate": 1.766666666666667e-07, "loss": 2.7358, "step": 2955 }, { "epoch": 29.56, "grad_norm": 51.82402038574219, "learning_rate": 1.7333333333333335e-07, "loss": 3.1109, "step": 2956 }, { "epoch": 29.57, "grad_norm": 74.95259857177734, "learning_rate": 1.7000000000000001e-07, "loss": 3.1183, "step": 2957 }, { "epoch": 29.58, "grad_norm": 86.92271423339844, "learning_rate": 1.6666666666666668e-07, "loss": 1.8364, "step": 2958 }, { "epoch": 29.59, "grad_norm": 382.7980041503906, "learning_rate": 1.6333333333333334e-07, "loss": 2.8853, "step": 2959 }, { "epoch": 29.6, "grad_norm": 59.902217864990234, "learning_rate": 1.6e-07, "loss": 2.937, "step": 2960 }, { "epoch": 29.61, "grad_norm": 49.124752044677734, "learning_rate": 1.5666666666666667e-07, "loss": 2.0446, "step": 2961 }, { "epoch": 29.62, "grad_norm": 56.141719818115234, "learning_rate": 1.5333333333333333e-07, "loss": 2.664, "step": 2962 }, { "epoch": 29.63, "grad_norm": 30.23763084411621, "learning_rate": 1.5000000000000002e-07, "loss": 2.8402, "step": 2963 }, { "epoch": 29.64, "grad_norm": 112.08224487304688, "learning_rate": 1.4666666666666668e-07, "loss": 2.0918, "step": 2964 }, { "epoch": 29.65, "grad_norm": 41.78651809692383, "learning_rate": 1.4333333333333335e-07, "loss": 2.944, "step": 2965 }, { "epoch": 29.66, "grad_norm": 51.57624053955078, "learning_rate": 1.4e-07, "loss": 2.3596, "step": 2966 }, { "epoch": 29.67, "grad_norm": 60.676048278808594, "learning_rate": 1.3666666666666667e-07, "loss": 3.3252, "step": 2967 }, { "epoch": 29.68, "grad_norm": 196.72164916992188, "learning_rate": 1.3333333333333336e-07, "loss": 2.8015, "step": 2968 }, { "epoch": 29.69, "grad_norm": 61.63528823852539, "learning_rate": 1.3e-07, "loss": 2.5777, "step": 2969 
}, { "epoch": 29.7, "grad_norm": 50.57680130004883, "learning_rate": 1.2666666666666666e-07, "loss": 2.3747, "step": 2970 }, { "epoch": 29.71, "grad_norm": 392.16790771484375, "learning_rate": 1.2333333333333335e-07, "loss": 3.1427, "step": 2971 }, { "epoch": 29.72, "grad_norm": 213.79774475097656, "learning_rate": 1.2000000000000002e-07, "loss": 3.1351, "step": 2972 }, { "epoch": 29.73, "grad_norm": 86.00875091552734, "learning_rate": 1.1666666666666668e-07, "loss": 2.8655, "step": 2973 }, { "epoch": 29.74, "grad_norm": 134.1068572998047, "learning_rate": 1.1333333333333336e-07, "loss": 3.0478, "step": 2974 }, { "epoch": 29.75, "grad_norm": 69.30460357666016, "learning_rate": 1.1e-07, "loss": 2.6355, "step": 2975 }, { "epoch": 29.76, "grad_norm": 61.68446350097656, "learning_rate": 1.0666666666666667e-07, "loss": 2.8361, "step": 2976 }, { "epoch": 29.77, "grad_norm": 171.46670532226562, "learning_rate": 1.0333333333333335e-07, "loss": 2.7123, "step": 2977 }, { "epoch": 29.78, "grad_norm": 72.21646118164062, "learning_rate": 1.0000000000000001e-07, "loss": 2.4395, "step": 2978 }, { "epoch": 29.79, "grad_norm": 45.89389419555664, "learning_rate": 9.666666666666669e-08, "loss": 3.5872, "step": 2979 }, { "epoch": 29.8, "grad_norm": 134.54237365722656, "learning_rate": 9.333333333333335e-08, "loss": 3.3266, "step": 2980 }, { "epoch": 29.81, "grad_norm": 80.27899932861328, "learning_rate": 9e-08, "loss": 2.8887, "step": 2981 }, { "epoch": 29.82, "grad_norm": 31.392784118652344, "learning_rate": 8.666666666666668e-08, "loss": 3.4849, "step": 2982 }, { "epoch": 29.83, "grad_norm": 50.517520904541016, "learning_rate": 8.333333333333334e-08, "loss": 3.7839, "step": 2983 }, { "epoch": 29.84, "grad_norm": 129.22604370117188, "learning_rate": 8e-08, "loss": 2.8438, "step": 2984 }, { "epoch": 29.85, "grad_norm": 651.9296264648438, "learning_rate": 7.666666666666666e-08, "loss": 2.7821, "step": 2985 }, { "epoch": 29.86, "grad_norm": 57.15330505371094, "learning_rate": 
7.333333333333334e-08, "loss": 2.8274, "step": 2986 }, { "epoch": 29.87, "grad_norm": 104.18187713623047, "learning_rate": 7e-08, "loss": 2.8014, "step": 2987 }, { "epoch": 29.88, "grad_norm": 33.75153350830078, "learning_rate": 6.666666666666668e-08, "loss": 2.8256, "step": 2988 }, { "epoch": 29.89, "grad_norm": 65.3919677734375, "learning_rate": 6.333333333333333e-08, "loss": 2.7101, "step": 2989 }, { "epoch": 29.9, "grad_norm": 48.03522872924805, "learning_rate": 6.000000000000001e-08, "loss": 2.2407, "step": 2990 }, { "epoch": 29.91, "grad_norm": 91.13877868652344, "learning_rate": 5.666666666666668e-08, "loss": 2.223, "step": 2991 }, { "epoch": 29.92, "grad_norm": 55.667545318603516, "learning_rate": 5.3333333333333334e-08, "loss": 2.7852, "step": 2992 }, { "epoch": 29.93, "grad_norm": 52.84622573852539, "learning_rate": 5.0000000000000004e-08, "loss": 2.934, "step": 2993 }, { "epoch": 29.94, "grad_norm": 52.816734313964844, "learning_rate": 4.6666666666666674e-08, "loss": 2.5578, "step": 2994 }, { "epoch": 29.95, "grad_norm": 36.82888412475586, "learning_rate": 4.333333333333334e-08, "loss": 2.7392, "step": 2995 }, { "epoch": 29.96, "grad_norm": 44.7187614440918, "learning_rate": 4e-08, "loss": 2.5095, "step": 2996 }, { "epoch": 29.97, "grad_norm": 44.93411636352539, "learning_rate": 3.666666666666667e-08, "loss": 3.4538, "step": 2997 }, { "epoch": 29.98, "grad_norm": 33.02327346801758, "learning_rate": 3.333333333333334e-08, "loss": 2.1049, "step": 2998 }, { "epoch": 29.99, "grad_norm": 140.58895874023438, "learning_rate": 3.0000000000000004e-08, "loss": 1.9942, "step": 2999 }, { "epoch": 30.0, "grad_norm": 154.81504821777344, "learning_rate": 2.6666666666666667e-08, "loss": 2.4689, "step": 3000 }, { "epoch": 30.0, "eval_loss": 2.6711971759796143, "eval_map": 0.0058, "eval_map_50": 0.0128, "eval_map_75": 0.0046, "eval_map_applique": 0.0, "eval_map_bag, wallet": 0.0, "eval_map_belt": 0.0, "eval_map_bow": 0.0, "eval_map_buckle": 0.0, "eval_map_cape": 0.0, 
"eval_map_cardigan": 0.0, "eval_map_coat": 0.0, "eval_map_collar": 0.0, "eval_map_dress": 0.0681, "eval_map_epaulette": 0.0, "eval_map_flower": 0.0, "eval_map_glasses": 0.0, "eval_map_glove": 0.0, "eval_map_hat": 0.0, "eval_map_headband, head covering, hair accessory": 0.0, "eval_map_hood": 0.0, "eval_map_jacket": 0.0, "eval_map_jumpsuit": 0.0, "eval_map_lapel": 0.0, "eval_map_large": 0.0055, "eval_map_medium": 0.0081, "eval_map_neckline": 0.0106, "eval_map_pants": 0.0, "eval_map_pocket": 0.0, "eval_map_ribbon": 0.0, "eval_map_rivet": 0.0, "eval_map_ruffle": 0.0, "eval_map_scarf": 0.0, "eval_map_sequin": 0.0, "eval_map_shirt, blouse": 0.0, "eval_map_shoe": 0.1226, "eval_map_shorts": 0.0, "eval_map_skirt": 0.0, "eval_map_sleeve": 0.0368, "eval_map_small": 0.0042, "eval_map_sock": 0.0, "eval_map_tie": 0.0, "eval_map_tights, stockings": 0.0, "eval_map_top, t-shirt, sweatshirt": 0.0, "eval_map_umbrella": 0.0, "eval_map_vest": 0.0, "eval_map_watch": 0.0, "eval_map_zipper": 0.0, "eval_mar_1": 0.0128, "eval_mar_10": 0.0308, "eval_mar_100": 0.0362, "eval_mar_100_applique": 0.0, "eval_mar_100_bag, wallet": 0.0, "eval_mar_100_belt": 0.0, "eval_mar_100_bow": 0.0, "eval_mar_100_buckle": 0.0, "eval_mar_100_cape": 0.0, "eval_mar_100_cardigan": 0.0, "eval_mar_100_coat": 0.0, "eval_mar_100_collar": 0.0, "eval_mar_100_dress": 0.2918, "eval_mar_100_epaulette": 0.0, "eval_mar_100_flower": 0.0, "eval_mar_100_glasses": 0.0, "eval_mar_100_glove": 0.0, "eval_mar_100_hat": 0.0, "eval_mar_100_headband, head covering, hair accessory": 0.0, "eval_mar_100_hood": 0.0, "eval_mar_100_jacket": 0.0, "eval_mar_100_jumpsuit": 0.0, "eval_mar_100_lapel": 0.0, "eval_mar_100_neckline": 0.1905, "eval_mar_100_pants": 0.0, "eval_mar_100_pocket": 0.0, "eval_mar_100_ribbon": 0.0, "eval_mar_100_rivet": 0.0, "eval_mar_100_ruffle": 0.0, "eval_mar_100_scarf": 0.0, "eval_mar_100_sequin": 0.0, "eval_mar_100_shirt, blouse": 0.0, "eval_mar_100_shoe": 0.5358, "eval_mar_100_shorts": 0.0, "eval_mar_100_skirt": 0.0, 
"eval_mar_100_sleeve": 0.4661, "eval_mar_100_sock": 0.0, "eval_mar_100_tie": 0.0, "eval_mar_100_tights, stockings": 0.0, "eval_mar_100_top, t-shirt, sweatshirt": 0.0, "eval_mar_100_umbrella": 0.0, "eval_mar_100_vest": 0.0, "eval_mar_100_watch": 0.0, "eval_mar_100_zipper": 0.0, "eval_mar_large": 0.0448, "eval_mar_medium": 0.0494, "eval_mar_small": 0.0239, "eval_model_preparation_time": 0.0124, "eval_runtime": 18.7424, "eval_samples_per_second": 5.336, "eval_steps_per_second": 1.334, "step": 3000 }, { "epoch": 30.0, "step": 3000, "total_flos": 5.73524623872e+18, "train_loss": 3.2792244461774827, "train_runtime": 7125.6928, "train_samples_per_second": 1.684, "train_steps_per_second": 0.421 } ], "logging_steps": 1, "max_steps": 3000, "num_input_tokens_seen": 0, "num_train_epochs": 30, "save_steps": 10, "stateful_callbacks": { "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": true }, "attributes": {} } }, "total_flos": 5.73524623872e+18, "train_batch_size": 4, "trial_name": null, "trial_params": null }