Training in progress, step 2800
- model-00001-of-00002.safetensors +1 -1
- model-00002-of-00002.safetensors +1 -1
- trainer_log.jsonl +401 -0
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:f34cae3fab9c8ec5ea11464dd24236ddae6333b02fdf637881b3d0adca547e25
 size 4988025760
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:53b9707b26ea05c3c7423de33aa3a64c775fd2103303d0de2bcc6f35ce565f09
 size 240691728
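Both shards above are stored as Git LFS pointers (version / oid sha256 / size), so only the pointer text changes in this commit. A minimal sketch, assuming the shards have been downloaded locally under the same filenames, for checking that each local file matches the digest and size recorded in the new pointers (the filenames and paths here are illustrative, not part of the commit):

```python
# Integrity-check sketch for the LFS-tracked shards in this commit.
# Assumes the files were downloaded into the current directory under
# their original names; adjust paths as needed.
import hashlib
import os

EXPECTED = {
    # oid and size copied from the updated LFS pointers above
    "model-00001-of-00002.safetensors": (
        "f34cae3fab9c8ec5ea11464dd24236ddae6333b02fdf637881b3d0adca547e25",
        4988025760,
    ),
    "model-00002-of-00002.safetensors": (
        "53b9707b26ea05c3c7423de33aa3a64c775fd2103303d0de2bcc6f35ce565f09",
        240691728,
    ),
}

def sha256_of(path, chunk_size=1 << 20):
    """Stream the file in chunks so multi-GB shards need not fit in memory."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

for name, (oid, size) in EXPECTED.items():
    ok = os.path.getsize(name) == size and sha256_of(name) == oid
    print(f"{name}: {'OK' if ok else 'MISMATCH'}")
```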
trainer_log.jsonl CHANGED
@@ -2411,3 +2411,404 @@
2411 |
{"current_steps": 2411, "total_steps": 3906, "loss": 1.3785, "learning_rate": 1.3030793291059545e-05, "epoch": 0.617216, "percentage": 61.73, "elapsed_time": "8:51:49", "remaining_time": "5:29:46"}
|
2412 |
{"current_steps": 2412, "total_steps": 3906, "loss": 1.3771, "learning_rate": 1.3015561818806055e-05, "epoch": 0.617472, "percentage": 61.75, "elapsed_time": "8:52:02", "remaining_time": "5:29:32"}
|
2413 |
{"current_steps": 2413, "total_steps": 3906, "loss": 1.3389, "learning_rate": 1.3000334958745856e-05, "epoch": 0.617728, "percentage": 61.78, "elapsed_time": "8:52:15", "remaining_time": "5:29:19"}
|
2414 |
+
{"current_steps": 2414, "total_steps": 3906, "loss": 1.3434, "learning_rate": 1.298511272093405e-05, "epoch": 0.617984, "percentage": 61.8, "elapsed_time": "8:52:28", "remaining_time": "5:29:06"}
|
2415 |
+
{"current_steps": 2415, "total_steps": 3906, "loss": 1.3783, "learning_rate": 1.2969895115422682e-05, "epoch": 0.61824, "percentage": 61.83, "elapsed_time": "8:52:42", "remaining_time": "5:28:53"}
|
2416 |
+
{"current_steps": 2416, "total_steps": 3906, "loss": 1.3487, "learning_rate": 1.2954682152260751e-05, "epoch": 0.618496, "percentage": 61.85, "elapsed_time": "8:52:55", "remaining_time": "5:28:39"}
|
2417 |
+
{"current_steps": 2417, "total_steps": 3906, "loss": 1.3774, "learning_rate": 1.2939473841494163e-05, "epoch": 0.618752, "percentage": 61.88, "elapsed_time": "8:53:08", "remaining_time": "5:28:26"}
|
2418 |
+
{"current_steps": 2418, "total_steps": 3906, "loss": 1.3145, "learning_rate": 1.2924270193165778e-05, "epoch": 0.619008, "percentage": 61.9, "elapsed_time": "8:53:21", "remaining_time": "5:28:13"}
|
2419 |
+
{"current_steps": 2419, "total_steps": 3906, "loss": 1.3623, "learning_rate": 1.2909071217315363e-05, "epoch": 0.619264, "percentage": 61.93, "elapsed_time": "8:53:34", "remaining_time": "5:28:00"}
|
2420 |
+
{"current_steps": 2420, "total_steps": 3906, "loss": 1.3497, "learning_rate": 1.2893876923979614e-05, "epoch": 0.61952, "percentage": 61.96, "elapsed_time": "8:53:48", "remaining_time": "5:27:46"}
|
2421 |
+
{"current_steps": 2421, "total_steps": 3906, "loss": 1.325, "learning_rate": 1.2878687323192111e-05, "epoch": 0.619776, "percentage": 61.98, "elapsed_time": "8:54:01", "remaining_time": "5:27:33"}
|
2422 |
+
{"current_steps": 2422, "total_steps": 3906, "loss": 1.3325, "learning_rate": 1.2863502424983369e-05, "epoch": 0.620032, "percentage": 62.01, "elapsed_time": "8:54:14", "remaining_time": "5:27:20"}
|
2423 |
+
{"current_steps": 2423, "total_steps": 3906, "loss": 1.3326, "learning_rate": 1.2848322239380758e-05, "epoch": 0.620288, "percentage": 62.03, "elapsed_time": "8:54:27", "remaining_time": "5:27:07"}
|
2424 |
+
{"current_steps": 2424, "total_steps": 3906, "loss": 1.3554, "learning_rate": 1.2833146776408563e-05, "epoch": 0.620544, "percentage": 62.06, "elapsed_time": "8:54:40", "remaining_time": "5:26:53"}
|
2425 |
+
{"current_steps": 2425, "total_steps": 3906, "loss": 1.339, "learning_rate": 1.2817976046087943e-05, "epoch": 0.6208, "percentage": 62.08, "elapsed_time": "8:54:53", "remaining_time": "5:26:40"}
|
2426 |
+
{"current_steps": 2426, "total_steps": 3906, "loss": 1.3947, "learning_rate": 1.2802810058436938e-05, "epoch": 0.621056, "percentage": 62.11, "elapsed_time": "8:55:07", "remaining_time": "5:26:27"}
|
2427 |
+
{"current_steps": 2427, "total_steps": 3906, "loss": 1.3138, "learning_rate": 1.2787648823470443e-05, "epoch": 0.621312, "percentage": 62.14, "elapsed_time": "8:55:20", "remaining_time": "5:26:13"}
|
2428 |
+
{"current_steps": 2428, "total_steps": 3906, "loss": 1.3308, "learning_rate": 1.2772492351200234e-05, "epoch": 0.621568, "percentage": 62.16, "elapsed_time": "8:55:33", "remaining_time": "5:26:00"}
|
2429 |
+
{"current_steps": 2429, "total_steps": 3906, "loss": 1.3421, "learning_rate": 1.2757340651634911e-05, "epoch": 0.621824, "percentage": 62.19, "elapsed_time": "8:55:46", "remaining_time": "5:25:47"}
|
2430 |
+
{"current_steps": 2430, "total_steps": 3906, "loss": 1.3473, "learning_rate": 1.274219373477995e-05, "epoch": 0.62208, "percentage": 62.21, "elapsed_time": "8:55:59", "remaining_time": "5:25:34"}
|
2431 |
+
{"current_steps": 2431, "total_steps": 3906, "loss": 1.3024, "learning_rate": 1.2727051610637666e-05, "epoch": 0.622336, "percentage": 62.24, "elapsed_time": "8:56:12", "remaining_time": "5:25:20"}
|
2432 |
+
{"current_steps": 2432, "total_steps": 3906, "loss": 1.3302, "learning_rate": 1.2711914289207195e-05, "epoch": 0.622592, "percentage": 62.26, "elapsed_time": "8:56:26", "remaining_time": "5:25:07"}
|
2433 |
+
{"current_steps": 2433, "total_steps": 3906, "loss": 1.343, "learning_rate": 1.2696781780484513e-05, "epoch": 0.622848, "percentage": 62.29, "elapsed_time": "8:56:39", "remaining_time": "5:24:54"}
|
2434 |
+
{"current_steps": 2434, "total_steps": 3906, "loss": 1.3832, "learning_rate": 1.268165409446242e-05, "epoch": 0.623104, "percentage": 62.31, "elapsed_time": "8:56:52", "remaining_time": "5:24:41"}
|
2435 |
+
{"current_steps": 2435, "total_steps": 3906, "loss": 1.3692, "learning_rate": 1.2666531241130513e-05, "epoch": 0.62336, "percentage": 62.34, "elapsed_time": "8:57:05", "remaining_time": "5:24:27"}
|
2436 |
+
{"current_steps": 2436, "total_steps": 3906, "loss": 1.3404, "learning_rate": 1.265141323047522e-05, "epoch": 0.623616, "percentage": 62.37, "elapsed_time": "8:57:18", "remaining_time": "5:24:14"}
|
2437 |
+
{"current_steps": 2437, "total_steps": 3906, "loss": 1.3256, "learning_rate": 1.2636300072479758e-05, "epoch": 0.623872, "percentage": 62.39, "elapsed_time": "8:57:32", "remaining_time": "5:24:01"}
|
2438 |
+
{"current_steps": 2438, "total_steps": 3906, "loss": 1.39, "learning_rate": 1.2621191777124144e-05, "epoch": 0.624128, "percentage": 62.42, "elapsed_time": "8:57:45", "remaining_time": "5:23:48"}
|
2439 |
+
{"current_steps": 2439, "total_steps": 3906, "loss": 1.3666, "learning_rate": 1.260608835438518e-05, "epoch": 0.624384, "percentage": 62.44, "elapsed_time": "8:57:58", "remaining_time": "5:23:34"}
|
2440 |
+
{"current_steps": 2440, "total_steps": 3906, "loss": 1.3466, "learning_rate": 1.2590989814236467e-05, "epoch": 0.62464, "percentage": 62.47, "elapsed_time": "8:58:11", "remaining_time": "5:23:21"}
|
2441 |
+
{"current_steps": 2441, "total_steps": 3906, "loss": 1.3569, "learning_rate": 1.2575896166648349e-05, "epoch": 0.624896, "percentage": 62.49, "elapsed_time": "8:58:24", "remaining_time": "5:23:08"}
|
2442 |
+
{"current_steps": 2442, "total_steps": 3906, "loss": 1.3271, "learning_rate": 1.2560807421587967e-05, "epoch": 0.625152, "percentage": 62.52, "elapsed_time": "8:58:38", "remaining_time": "5:22:54"}
|
2443 |
+
{"current_steps": 2443, "total_steps": 3906, "loss": 1.3551, "learning_rate": 1.254572358901922e-05, "epoch": 0.625408, "percentage": 62.54, "elapsed_time": "8:58:51", "remaining_time": "5:22:41"}
|
2444 |
+
{"current_steps": 2444, "total_steps": 3906, "loss": 1.3554, "learning_rate": 1.2530644678902752e-05, "epoch": 0.625664, "percentage": 62.57, "elapsed_time": "8:59:04", "remaining_time": "5:22:28"}
|
2445 |
+
{"current_steps": 2445, "total_steps": 3906, "loss": 1.3747, "learning_rate": 1.251557070119597e-05, "epoch": 0.62592, "percentage": 62.6, "elapsed_time": "8:59:17", "remaining_time": "5:22:15"}
|
2446 |
+
{"current_steps": 2446, "total_steps": 3906, "loss": 1.3243, "learning_rate": 1.2500501665853016e-05, "epoch": 0.626176, "percentage": 62.62, "elapsed_time": "8:59:30", "remaining_time": "5:22:01"}
|
2447 |
+
{"current_steps": 2447, "total_steps": 3906, "loss": 1.3807, "learning_rate": 1.2485437582824764e-05, "epoch": 0.626432, "percentage": 62.65, "elapsed_time": "8:59:44", "remaining_time": "5:21:48"}
|
2448 |
+
{"current_steps": 2448, "total_steps": 3906, "loss": 1.3534, "learning_rate": 1.2470378462058826e-05, "epoch": 0.626688, "percentage": 62.67, "elapsed_time": "8:59:57", "remaining_time": "5:21:35"}
|
2449 |
+
{"current_steps": 2449, "total_steps": 3906, "loss": 1.3517, "learning_rate": 1.245532431349954e-05, "epoch": 0.626944, "percentage": 62.7, "elapsed_time": "9:00:10", "remaining_time": "5:21:22"}
|
2450 |
+
{"current_steps": 2450, "total_steps": 3906, "loss": 1.3744, "learning_rate": 1.2440275147087947e-05, "epoch": 0.6272, "percentage": 62.72, "elapsed_time": "9:00:23", "remaining_time": "5:21:08"}
|
2451 |
+
{"current_steps": 2451, "total_steps": 3906, "loss": 1.3666, "learning_rate": 1.2425230972761808e-05, "epoch": 0.627456, "percentage": 62.75, "elapsed_time": "9:00:36", "remaining_time": "5:20:55"}
|
2452 |
+
{"current_steps": 2452, "total_steps": 3906, "loss": 1.3044, "learning_rate": 1.24101918004556e-05, "epoch": 0.627712, "percentage": 62.78, "elapsed_time": "9:00:50", "remaining_time": "5:20:42"}
|
2453 |
+
{"current_steps": 2453, "total_steps": 3906, "loss": 1.2613, "learning_rate": 1.2395157640100459e-05, "epoch": 0.627968, "percentage": 62.8, "elapsed_time": "9:01:03", "remaining_time": "5:20:29"}
|
2454 |
+
{"current_steps": 2454, "total_steps": 3906, "loss": 1.3316, "learning_rate": 1.2380128501624248e-05, "epoch": 0.628224, "percentage": 62.83, "elapsed_time": "9:01:16", "remaining_time": "5:20:15"}
|
2455 |
+
{"current_steps": 2455, "total_steps": 3906, "loss": 1.3177, "learning_rate": 1.2365104394951498e-05, "epoch": 0.62848, "percentage": 62.85, "elapsed_time": "9:01:29", "remaining_time": "5:20:02"}
|
2456 |
+
{"current_steps": 2456, "total_steps": 3906, "loss": 1.3284, "learning_rate": 1.2350085330003419e-05, "epoch": 0.628736, "percentage": 62.88, "elapsed_time": "9:01:42", "remaining_time": "5:19:49"}
|
2457 |
+
{"current_steps": 2457, "total_steps": 3906, "loss": 1.3464, "learning_rate": 1.2335071316697895e-05, "epoch": 0.628992, "percentage": 62.9, "elapsed_time": "9:01:56", "remaining_time": "5:19:36"}
|
2458 |
+
{"current_steps": 2458, "total_steps": 3906, "loss": 1.3737, "learning_rate": 1.2320062364949478e-05, "epoch": 0.629248, "percentage": 62.93, "elapsed_time": "9:02:09", "remaining_time": "5:19:22"}
|
2459 |
+
{"current_steps": 2459, "total_steps": 3906, "loss": 1.3795, "learning_rate": 1.2305058484669356e-05, "epoch": 0.629504, "percentage": 62.95, "elapsed_time": "9:02:22", "remaining_time": "5:19:09"}
|
2460 |
+
{"current_steps": 2460, "total_steps": 3906, "loss": 1.3317, "learning_rate": 1.2290059685765395e-05, "epoch": 0.62976, "percentage": 62.98, "elapsed_time": "9:02:35", "remaining_time": "5:18:56"}
|
2461 |
+
{"current_steps": 2461, "total_steps": 3906, "loss": 1.3408, "learning_rate": 1.2275065978142089e-05, "epoch": 0.630016, "percentage": 63.01, "elapsed_time": "9:02:48", "remaining_time": "5:18:43"}
|
2462 |
+
{"current_steps": 2462, "total_steps": 3906, "loss": 1.3484, "learning_rate": 1.2260077371700572e-05, "epoch": 0.630272, "percentage": 63.03, "elapsed_time": "9:03:02", "remaining_time": "5:18:29"}
|
2463 |
+
{"current_steps": 2463, "total_steps": 3906, "loss": 1.3448, "learning_rate": 1.2245093876338618e-05, "epoch": 0.630528, "percentage": 63.06, "elapsed_time": "9:03:15", "remaining_time": "5:18:16"}
|
2464 |
+
{"current_steps": 2464, "total_steps": 3906, "loss": 1.3559, "learning_rate": 1.2230115501950626e-05, "epoch": 0.630784, "percentage": 63.08, "elapsed_time": "9:03:28", "remaining_time": "5:18:03"}
|
2465 |
+
{"current_steps": 2465, "total_steps": 3906, "loss": 1.3727, "learning_rate": 1.221514225842759e-05, "epoch": 0.63104, "percentage": 63.11, "elapsed_time": "9:03:41", "remaining_time": "5:17:50"}
|
2466 |
+
{"current_steps": 2466, "total_steps": 3906, "loss": 1.2875, "learning_rate": 1.2200174155657147e-05, "epoch": 0.631296, "percentage": 63.13, "elapsed_time": "9:03:54", "remaining_time": "5:17:36"}
|
2467 |
+
{"current_steps": 2467, "total_steps": 3906, "loss": 1.3075, "learning_rate": 1.2185211203523521e-05, "epoch": 0.631552, "percentage": 63.16, "elapsed_time": "9:04:07", "remaining_time": "5:17:23"}
|
2468 |
+
{"current_steps": 2468, "total_steps": 3906, "loss": 1.3216, "learning_rate": 1.2170253411907536e-05, "epoch": 0.631808, "percentage": 63.18, "elapsed_time": "9:04:21", "remaining_time": "5:17:10"}
|
2469 |
+
{"current_steps": 2469, "total_steps": 3906, "loss": 1.3568, "learning_rate": 1.2155300790686617e-05, "epoch": 0.632064, "percentage": 63.21, "elapsed_time": "9:04:34", "remaining_time": "5:16:56"}
|
2470 |
+
{"current_steps": 2470, "total_steps": 3906, "loss": 1.3665, "learning_rate": 1.2140353349734776e-05, "epoch": 0.63232, "percentage": 63.24, "elapsed_time": "9:04:47", "remaining_time": "5:16:43"}
|
2471 |
+
{"current_steps": 2471, "total_steps": 3906, "loss": 1.3672, "learning_rate": 1.212541109892258e-05, "epoch": 0.632576, "percentage": 63.26, "elapsed_time": "9:05:00", "remaining_time": "5:16:30"}
|
2472 |
+
{"current_steps": 2472, "total_steps": 3906, "loss": 1.4082, "learning_rate": 1.2110474048117198e-05, "epoch": 0.632832, "percentage": 63.29, "elapsed_time": "9:05:13", "remaining_time": "5:16:17"}
|
2473 |
+
{"current_steps": 2473, "total_steps": 3906, "loss": 1.3281, "learning_rate": 1.2095542207182348e-05, "epoch": 0.633088, "percentage": 63.31, "elapsed_time": "9:05:27", "remaining_time": "5:16:04"}
|
2474 |
+
{"current_steps": 2474, "total_steps": 3906, "loss": 1.4126, "learning_rate": 1.2080615585978315e-05, "epoch": 0.633344, "percentage": 63.34, "elapsed_time": "9:05:40", "remaining_time": "5:15:50"}
|
2475 |
+
{"current_steps": 2475, "total_steps": 3906, "loss": 1.3688, "learning_rate": 1.2065694194361934e-05, "epoch": 0.6336, "percentage": 63.36, "elapsed_time": "9:05:53", "remaining_time": "5:15:37"}
|
2476 |
+
{"current_steps": 2476, "total_steps": 3906, "loss": 1.3437, "learning_rate": 1.2050778042186594e-05, "epoch": 0.633856, "percentage": 63.39, "elapsed_time": "9:06:06", "remaining_time": "5:15:24"}
|
2477 |
+
{"current_steps": 2477, "total_steps": 3906, "loss": 1.3301, "learning_rate": 1.2035867139302218e-05, "epoch": 0.634112, "percentage": 63.42, "elapsed_time": "9:06:19", "remaining_time": "5:15:10"}
|
2478 |
+
{"current_steps": 2478, "total_steps": 3906, "loss": 1.3528, "learning_rate": 1.2020961495555247e-05, "epoch": 0.634368, "percentage": 63.44, "elapsed_time": "9:06:33", "remaining_time": "5:14:57"}
|
2479 |
+
{"current_steps": 2479, "total_steps": 3906, "loss": 1.3343, "learning_rate": 1.2006061120788677e-05, "epoch": 0.634624, "percentage": 63.47, "elapsed_time": "9:06:46", "remaining_time": "5:14:44"}
|
2480 |
+
{"current_steps": 2480, "total_steps": 3906, "loss": 1.3635, "learning_rate": 1.199116602484201e-05, "epoch": 0.63488, "percentage": 63.49, "elapsed_time": "9:06:59", "remaining_time": "5:14:31"}
|
2481 |
+
{"current_steps": 2481, "total_steps": 3906, "loss": 1.3371, "learning_rate": 1.1976276217551268e-05, "epoch": 0.635136, "percentage": 63.52, "elapsed_time": "9:07:12", "remaining_time": "5:14:17"}
|
2482 |
+
{"current_steps": 2482, "total_steps": 3906, "loss": 1.353, "learning_rate": 1.1961391708748966e-05, "epoch": 0.635392, "percentage": 63.54, "elapsed_time": "9:07:25", "remaining_time": "5:14:04"}
|
2483 |
+
{"current_steps": 2483, "total_steps": 3906, "loss": 1.3123, "learning_rate": 1.1946512508264152e-05, "epoch": 0.635648, "percentage": 63.57, "elapsed_time": "9:07:39", "remaining_time": "5:13:51"}
|
2484 |
+
{"current_steps": 2484, "total_steps": 3906, "loss": 1.3345, "learning_rate": 1.1931638625922322e-05, "epoch": 0.635904, "percentage": 63.59, "elapsed_time": "9:07:52", "remaining_time": "5:13:38"}
|
2485 |
+
{"current_steps": 2485, "total_steps": 3906, "loss": 1.3771, "learning_rate": 1.1916770071545499e-05, "epoch": 0.63616, "percentage": 63.62, "elapsed_time": "9:08:05", "remaining_time": "5:13:24"}
|
2486 |
+
{"current_steps": 2486, "total_steps": 3906, "loss": 1.3453, "learning_rate": 1.1901906854952172e-05, "epoch": 0.636416, "percentage": 63.65, "elapsed_time": "9:08:18", "remaining_time": "5:13:11"}
|
2487 |
+
{"current_steps": 2487, "total_steps": 3906, "loss": 1.3418, "learning_rate": 1.1887048985957312e-05, "epoch": 0.636672, "percentage": 63.67, "elapsed_time": "9:08:31", "remaining_time": "5:12:58"}
|
2488 |
+
{"current_steps": 2488, "total_steps": 3906, "loss": 1.3237, "learning_rate": 1.187219647437235e-05, "epoch": 0.636928, "percentage": 63.7, "elapsed_time": "9:08:45", "remaining_time": "5:12:45"}
|
2489 |
+
{"current_steps": 2489, "total_steps": 3906, "loss": 1.3491, "learning_rate": 1.1857349330005193e-05, "epoch": 0.637184, "percentage": 63.72, "elapsed_time": "9:08:58", "remaining_time": "5:12:31"}
|
2490 |
+
{"current_steps": 2490, "total_steps": 3906, "loss": 1.3229, "learning_rate": 1.1842507562660176e-05, "epoch": 0.63744, "percentage": 63.75, "elapsed_time": "9:09:11", "remaining_time": "5:12:18"}
|
2491 |
+
{"current_steps": 2491, "total_steps": 3906, "loss": 1.3966, "learning_rate": 1.1827671182138114e-05, "epoch": 0.637696, "percentage": 63.77, "elapsed_time": "9:09:24", "remaining_time": "5:12:05"}
|
2492 |
+
{"current_steps": 2492, "total_steps": 3906, "loss": 1.3174, "learning_rate": 1.1812840198236248e-05, "epoch": 0.637952, "percentage": 63.8, "elapsed_time": "9:09:37", "remaining_time": "5:11:52"}
|
2493 |
+
{"current_steps": 2493, "total_steps": 3906, "loss": 1.3457, "learning_rate": 1.1798014620748266e-05, "epoch": 0.638208, "percentage": 63.82, "elapsed_time": "9:09:50", "remaining_time": "5:11:38"}
|
2494 |
+
{"current_steps": 2494, "total_steps": 3906, "loss": 1.3484, "learning_rate": 1.1783194459464272e-05, "epoch": 0.638464, "percentage": 63.85, "elapsed_time": "9:10:04", "remaining_time": "5:11:25"}
|
2495 |
+
{"current_steps": 2495, "total_steps": 3906, "loss": 1.333, "learning_rate": 1.1768379724170811e-05, "epoch": 0.63872, "percentage": 63.88, "elapsed_time": "9:10:17", "remaining_time": "5:11:12"}
|
2496 |
+
{"current_steps": 2496, "total_steps": 3906, "loss": 1.382, "learning_rate": 1.1753570424650822e-05, "epoch": 0.638976, "percentage": 63.9, "elapsed_time": "9:10:30", "remaining_time": "5:10:59"}
|
2497 |
+
{"current_steps": 2497, "total_steps": 3906, "loss": 1.3089, "learning_rate": 1.1738766570683673e-05, "epoch": 0.639232, "percentage": 63.93, "elapsed_time": "9:10:43", "remaining_time": "5:10:45"}
|
2498 |
+
{"current_steps": 2498, "total_steps": 3906, "loss": 1.3409, "learning_rate": 1.172396817204513e-05, "epoch": 0.639488, "percentage": 63.95, "elapsed_time": "9:10:56", "remaining_time": "5:10:32"}
|
2499 |
+
{"current_steps": 2499, "total_steps": 3906, "loss": 1.3399, "learning_rate": 1.1709175238507364e-05, "epoch": 0.639744, "percentage": 63.98, "elapsed_time": "9:11:10", "remaining_time": "5:10:19"}
|
2500 |
+
{"current_steps": 2500, "total_steps": 3906, "loss": 1.2926, "learning_rate": 1.169438777983892e-05, "epoch": 0.64, "percentage": 64.0, "elapsed_time": "9:11:23", "remaining_time": "5:10:06"}
|
2501 |
+
{"current_steps": 2501, "total_steps": 3906, "loss": 1.3431, "learning_rate": 1.1679605805804748e-05, "epoch": 0.640256, "percentage": 64.03, "elapsed_time": "9:11:36", "remaining_time": "5:09:52"}
|
2502 |
+
{"current_steps": 2502, "total_steps": 3906, "loss": 1.3706, "learning_rate": 1.1664829326166154e-05, "epoch": 0.640512, "percentage": 64.06, "elapsed_time": "9:11:49", "remaining_time": "5:09:39"}
|
2503 |
+
{"current_steps": 2503, "total_steps": 3906, "loss": 1.3487, "learning_rate": 1.1650058350680831e-05, "epoch": 0.640768, "percentage": 64.08, "elapsed_time": "9:12:02", "remaining_time": "5:09:26"}
|
2504 |
+
{"current_steps": 2504, "total_steps": 3906, "loss": 1.3599, "learning_rate": 1.1635292889102834e-05, "epoch": 0.641024, "percentage": 64.11, "elapsed_time": "9:12:16", "remaining_time": "5:09:13"}
|
2505 |
+
{"current_steps": 2505, "total_steps": 3906, "loss": 1.3526, "learning_rate": 1.1620532951182584e-05, "epoch": 0.64128, "percentage": 64.13, "elapsed_time": "9:12:29", "remaining_time": "5:08:59"}
|
2506 |
+
{"current_steps": 2506, "total_steps": 3906, "loss": 1.287, "learning_rate": 1.1605778546666846e-05, "epoch": 0.641536, "percentage": 64.16, "elapsed_time": "9:12:42", "remaining_time": "5:08:46"}
|
2507 |
+
{"current_steps": 2507, "total_steps": 3906, "loss": 1.351, "learning_rate": 1.1591029685298725e-05, "epoch": 0.641792, "percentage": 64.18, "elapsed_time": "9:12:55", "remaining_time": "5:08:33"}
|
2508 |
+
{"current_steps": 2508, "total_steps": 3906, "loss": 1.3599, "learning_rate": 1.1576286376817682e-05, "epoch": 0.642048, "percentage": 64.21, "elapsed_time": "9:13:08", "remaining_time": "5:08:20"}
|
2509 |
+
{"current_steps": 2509, "total_steps": 3906, "loss": 1.3467, "learning_rate": 1.156154863095949e-05, "epoch": 0.642304, "percentage": 64.23, "elapsed_time": "9:13:22", "remaining_time": "5:08:06"}
|
2510 |
+
{"current_steps": 2510, "total_steps": 3906, "loss": 1.3285, "learning_rate": 1.1546816457456265e-05, "epoch": 0.64256, "percentage": 64.26, "elapsed_time": "9:13:35", "remaining_time": "5:07:53"}
|
2511 |
+
{"current_steps": 2511, "total_steps": 3906, "loss": 1.3281, "learning_rate": 1.1532089866036442e-05, "epoch": 0.642816, "percentage": 64.29, "elapsed_time": "9:13:48", "remaining_time": "5:07:40"}
|
2512 |
+
{"current_steps": 2512, "total_steps": 3906, "loss": 1.3665, "learning_rate": 1.1517368866424767e-05, "epoch": 0.643072, "percentage": 64.31, "elapsed_time": "9:14:01", "remaining_time": "5:07:27"}
|
2513 |
+
{"current_steps": 2513, "total_steps": 3906, "loss": 1.3007, "learning_rate": 1.15026534683423e-05, "epoch": 0.643328, "percentage": 64.34, "elapsed_time": "9:14:14", "remaining_time": "5:07:13"}
|
2514 |
+
{"current_steps": 2514, "total_steps": 3906, "loss": 1.3333, "learning_rate": 1.1487943681506382e-05, "epoch": 0.643584, "percentage": 64.36, "elapsed_time": "9:14:28", "remaining_time": "5:07:00"}
|
2515 |
+
{"current_steps": 2515, "total_steps": 3906, "loss": 1.2957, "learning_rate": 1.1473239515630663e-05, "epoch": 0.64384, "percentage": 64.39, "elapsed_time": "9:14:41", "remaining_time": "5:06:47"}
|
2516 |
+
{"current_steps": 2516, "total_steps": 3906, "loss": 1.3152, "learning_rate": 1.1458540980425088e-05, "epoch": 0.644096, "percentage": 64.41, "elapsed_time": "9:14:54", "remaining_time": "5:06:33"}
|
2517 |
+
{"current_steps": 2517, "total_steps": 3906, "loss": 1.3456, "learning_rate": 1.144384808559587e-05, "epoch": 0.644352, "percentage": 64.44, "elapsed_time": "9:15:07", "remaining_time": "5:06:20"}
|
2518 |
+
{"current_steps": 2518, "total_steps": 3906, "loss": 1.354, "learning_rate": 1.1429160840845515e-05, "epoch": 0.644608, "percentage": 64.46, "elapsed_time": "9:15:20", "remaining_time": "5:06:07"}
|
2519 |
+
{"current_steps": 2519, "total_steps": 3906, "loss": 1.3367, "learning_rate": 1.141447925587277e-05, "epoch": 0.644864, "percentage": 64.49, "elapsed_time": "9:15:33", "remaining_time": "5:05:54"}
|
2520 |
+
{"current_steps": 2520, "total_steps": 3906, "loss": 1.334, "learning_rate": 1.1399803340372672e-05, "epoch": 0.64512, "percentage": 64.52, "elapsed_time": "9:15:47", "remaining_time": "5:05:40"}
|
2521 |
+
{"current_steps": 2521, "total_steps": 3906, "loss": 1.3497, "learning_rate": 1.1385133104036497e-05, "epoch": 0.645376, "percentage": 64.54, "elapsed_time": "9:16:00", "remaining_time": "5:05:27"}
|
2522 |
+
{"current_steps": 2522, "total_steps": 3906, "loss": 1.3819, "learning_rate": 1.1370468556551773e-05, "epoch": 0.645632, "percentage": 64.57, "elapsed_time": "9:16:13", "remaining_time": "5:05:14"}
|
2523 |
+
{"current_steps": 2523, "total_steps": 3906, "loss": 1.3388, "learning_rate": 1.1355809707602278e-05, "epoch": 0.645888, "percentage": 64.59, "elapsed_time": "9:16:26", "remaining_time": "5:05:01"}
|
2524 |
+
{"current_steps": 2524, "total_steps": 3906, "loss": 1.3192, "learning_rate": 1.1341156566868032e-05, "epoch": 0.646144, "percentage": 64.62, "elapsed_time": "9:16:39", "remaining_time": "5:04:47"}
|
2525 |
+
{"current_steps": 2525, "total_steps": 3906, "loss": 1.3473, "learning_rate": 1.1326509144025275e-05, "epoch": 0.6464, "percentage": 64.64, "elapsed_time": "9:16:53", "remaining_time": "5:04:34"}
|
2526 |
+
{"current_steps": 2526, "total_steps": 3906, "loss": 1.3234, "learning_rate": 1.1311867448746464e-05, "epoch": 0.646656, "percentage": 64.67, "elapsed_time": "9:17:06", "remaining_time": "5:04:21"}
|
2527 |
+
{"current_steps": 2527, "total_steps": 3906, "loss": 1.3501, "learning_rate": 1.1297231490700287e-05, "epoch": 0.646912, "percentage": 64.7, "elapsed_time": "9:17:19", "remaining_time": "5:04:08"}
|
2528 |
+
{"current_steps": 2528, "total_steps": 3906, "loss": 1.3368, "learning_rate": 1.1282601279551644e-05, "epoch": 0.647168, "percentage": 64.72, "elapsed_time": "9:17:32", "remaining_time": "5:03:54"}
|
2529 |
+
{"current_steps": 2529, "total_steps": 3906, "loss": 1.3246, "learning_rate": 1.1267976824961644e-05, "epoch": 0.647424, "percentage": 64.75, "elapsed_time": "9:17:45", "remaining_time": "5:03:41"}
|
2530 |
+
{"current_steps": 2530, "total_steps": 3906, "loss": 1.3336, "learning_rate": 1.1253358136587566e-05, "epoch": 0.64768, "percentage": 64.77, "elapsed_time": "9:17:59", "remaining_time": "5:03:28"}
|
2531 |
+
{"current_steps": 2531, "total_steps": 3906, "loss": 1.3144, "learning_rate": 1.1238745224082921e-05, "epoch": 0.647936, "percentage": 64.8, "elapsed_time": "9:18:12", "remaining_time": "5:03:15"}
|
2532 |
+
{"current_steps": 2532, "total_steps": 3906, "loss": 1.3599, "learning_rate": 1.1224138097097371e-05, "epoch": 0.648192, "percentage": 64.82, "elapsed_time": "9:18:25", "remaining_time": "5:03:01"}
|
2533 |
+
{"current_steps": 2533, "total_steps": 3906, "loss": 1.3565, "learning_rate": 1.120953676527678e-05, "epoch": 0.648448, "percentage": 64.85, "elapsed_time": "9:18:38", "remaining_time": "5:02:48"}
|
2534 |
+
{"current_steps": 2534, "total_steps": 3906, "loss": 1.2984, "learning_rate": 1.1194941238263177e-05, "epoch": 0.648704, "percentage": 64.87, "elapsed_time": "9:18:51", "remaining_time": "5:02:35"}
|
2535 |
+
{"current_steps": 2535, "total_steps": 3906, "loss": 1.3585, "learning_rate": 1.1180351525694761e-05, "epoch": 0.64896, "percentage": 64.9, "elapsed_time": "9:19:04", "remaining_time": "5:02:22"}
|
2536 |
+
{"current_steps": 2536, "total_steps": 3906, "loss": 1.3458, "learning_rate": 1.1165767637205894e-05, "epoch": 0.649216, "percentage": 64.93, "elapsed_time": "9:19:18", "remaining_time": "5:02:08"}
|
2537 |
+
{"current_steps": 2537, "total_steps": 3906, "loss": 1.3554, "learning_rate": 1.1151189582427077e-05, "epoch": 0.649472, "percentage": 64.95, "elapsed_time": "9:19:31", "remaining_time": "5:01:55"}
|
2538 |
+
{"current_steps": 2538, "total_steps": 3906, "loss": 1.3424, "learning_rate": 1.1136617370984984e-05, "epoch": 0.649728, "percentage": 64.98, "elapsed_time": "9:19:44", "remaining_time": "5:01:42"}
|
2539 |
+
{"current_steps": 2539, "total_steps": 3906, "loss": 1.3282, "learning_rate": 1.1122051012502402e-05, "epoch": 0.649984, "percentage": 65.0, "elapsed_time": "9:19:57", "remaining_time": "5:01:28"}
|
2540 |
+
{"current_steps": 2540, "total_steps": 3906, "loss": 1.3516, "learning_rate": 1.1107490516598267e-05, "epoch": 0.65024, "percentage": 65.03, "elapsed_time": "9:20:10", "remaining_time": "5:01:15"}
|
2541 |
+
{"current_steps": 2541, "total_steps": 3906, "loss": 1.3298, "learning_rate": 1.1092935892887654e-05, "epoch": 0.650496, "percentage": 65.05, "elapsed_time": "9:20:23", "remaining_time": "5:01:02"}
|
2542 |
+
{"current_steps": 2542, "total_steps": 3906, "loss": 1.3313, "learning_rate": 1.1078387150981742e-05, "epoch": 0.650752, "percentage": 65.08, "elapsed_time": "9:20:37", "remaining_time": "5:00:49"}
|
2543 |
+
{"current_steps": 2543, "total_steps": 3906, "loss": 1.3521, "learning_rate": 1.106384430048783e-05, "epoch": 0.651008, "percentage": 65.1, "elapsed_time": "9:20:50", "remaining_time": "5:00:35"}
|
2544 |
+
{"current_steps": 2544, "total_steps": 3906, "loss": 1.3529, "learning_rate": 1.1049307351009342e-05, "epoch": 0.651264, "percentage": 65.13, "elapsed_time": "9:21:03", "remaining_time": "5:00:22"}
|
2545 |
+
{"current_steps": 2545, "total_steps": 3906, "loss": 1.3264, "learning_rate": 1.1034776312145779e-05, "epoch": 0.65152, "percentage": 65.16, "elapsed_time": "9:21:16", "remaining_time": "5:00:09"}
|
2546 |
+
{"current_steps": 2546, "total_steps": 3906, "loss": 1.3455, "learning_rate": 1.1020251193492757e-05, "epoch": 0.651776, "percentage": 65.18, "elapsed_time": "9:21:29", "remaining_time": "4:59:55"}
|
2547 |
+
{"current_steps": 2547, "total_steps": 3906, "loss": 1.3635, "learning_rate": 1.1005732004641984e-05, "epoch": 0.652032, "percentage": 65.21, "elapsed_time": "9:21:42", "remaining_time": "4:59:42"}
|
2548 |
+
{"current_steps": 2548, "total_steps": 3906, "loss": 1.3832, "learning_rate": 1.0991218755181243e-05, "epoch": 0.652288, "percentage": 65.23, "elapsed_time": "9:21:55", "remaining_time": "4:59:29"}
|
2549 |
+
{"current_steps": 2549, "total_steps": 3906, "loss": 1.3361, "learning_rate": 1.0976711454694402e-05, "epoch": 0.652544, "percentage": 65.26, "elapsed_time": "9:22:09", "remaining_time": "4:59:16"}
|
2550 |
+
{"current_steps": 2550, "total_steps": 3906, "loss": 1.3177, "learning_rate": 1.0962210112761402e-05, "epoch": 0.6528, "percentage": 65.28, "elapsed_time": "9:22:22", "remaining_time": "4:59:02"}
|
2551 |
+
{"current_steps": 2551, "total_steps": 3906, "loss": 1.3671, "learning_rate": 1.0947714738958233e-05, "epoch": 0.653056, "percentage": 65.31, "elapsed_time": "9:22:35", "remaining_time": "4:58:49"}
|
2552 |
+
{"current_steps": 2552, "total_steps": 3906, "loss": 1.3123, "learning_rate": 1.0933225342856966e-05, "epoch": 0.653312, "percentage": 65.34, "elapsed_time": "9:22:48", "remaining_time": "4:58:36"}
|
2553 |
+
{"current_steps": 2553, "total_steps": 3906, "loss": 1.3228, "learning_rate": 1.0918741934025702e-05, "epoch": 0.653568, "percentage": 65.36, "elapsed_time": "9:23:01", "remaining_time": "4:58:23"}
|
2554 |
+
{"current_steps": 2554, "total_steps": 3906, "loss": 1.329, "learning_rate": 1.090426452202861e-05, "epoch": 0.653824, "percentage": 65.39, "elapsed_time": "9:23:15", "remaining_time": "4:58:09"}
|
2555 |
+
{"current_steps": 2555, "total_steps": 3906, "loss": 1.3878, "learning_rate": 1.0889793116425884e-05, "epoch": 0.65408, "percentage": 65.41, "elapsed_time": "9:23:28", "remaining_time": "4:57:56"}
|
2556 |
+
{"current_steps": 2556, "total_steps": 3906, "loss": 1.3181, "learning_rate": 1.0875327726773772e-05, "epoch": 0.654336, "percentage": 65.44, "elapsed_time": "9:23:41", "remaining_time": "4:57:43"}
|
2557 |
+
{"current_steps": 2557, "total_steps": 3906, "loss": 1.3469, "learning_rate": 1.0860868362624516e-05, "epoch": 0.654592, "percentage": 65.46, "elapsed_time": "9:23:54", "remaining_time": "4:57:30"}
|
2558 |
+
{"current_steps": 2558, "total_steps": 3906, "loss": 1.3505, "learning_rate": 1.08464150335264e-05, "epoch": 0.654848, "percentage": 65.49, "elapsed_time": "9:24:07", "remaining_time": "4:57:16"}
|
2559 |
+
{"current_steps": 2559, "total_steps": 3906, "loss": 1.3783, "learning_rate": 1.083196774902373e-05, "epoch": 0.655104, "percentage": 65.51, "elapsed_time": "9:24:21", "remaining_time": "4:57:03"}
|
2560 |
+
{"current_steps": 2560, "total_steps": 3906, "loss": 1.3488, "learning_rate": 1.0817526518656802e-05, "epoch": 0.65536, "percentage": 65.54, "elapsed_time": "9:24:34", "remaining_time": "4:56:50"}
|
2561 |
+
{"current_steps": 2561, "total_steps": 3906, "loss": 1.3305, "learning_rate": 1.0803091351961927e-05, "epoch": 0.655616, "percentage": 65.57, "elapsed_time": "9:24:47", "remaining_time": "4:56:37"}
|
2562 |
+
{"current_steps": 2562, "total_steps": 3906, "loss": 1.3636, "learning_rate": 1.0788662258471418e-05, "epoch": 0.655872, "percentage": 65.59, "elapsed_time": "9:25:00", "remaining_time": "4:56:23"}
|
2563 |
+
{"current_steps": 2563, "total_steps": 3906, "loss": 1.3667, "learning_rate": 1.0774239247713546e-05, "epoch": 0.656128, "percentage": 65.62, "elapsed_time": "9:25:13", "remaining_time": "4:56:10"}
|
2564 |
+
{"current_steps": 2564, "total_steps": 3906, "loss": 1.3573, "learning_rate": 1.0759822329212608e-05, "epoch": 0.656384, "percentage": 65.64, "elapsed_time": "9:25:27", "remaining_time": "4:55:57"}
|
2565 |
+
{"current_steps": 2565, "total_steps": 3906, "loss": 1.3154, "learning_rate": 1.0745411512488835e-05, "epoch": 0.65664, "percentage": 65.67, "elapsed_time": "9:25:40", "remaining_time": "4:55:44"}
|
2566 |
+
{"current_steps": 2566, "total_steps": 3906, "loss": 1.3204, "learning_rate": 1.0731006807058461e-05, "epoch": 0.656896, "percentage": 65.69, "elapsed_time": "9:25:53", "remaining_time": "4:55:30"}
|
2567 |
+
{"current_steps": 2567, "total_steps": 3906, "loss": 1.3833, "learning_rate": 1.0716608222433673e-05, "epoch": 0.657152, "percentage": 65.72, "elapsed_time": "9:26:06", "remaining_time": "4:55:17"}
|
2568 |
+
{"current_steps": 2568, "total_steps": 3906, "loss": 1.3484, "learning_rate": 1.0702215768122626e-05, "epoch": 0.657408, "percentage": 65.75, "elapsed_time": "9:26:19", "remaining_time": "4:55:04"}
|
2569 |
+
{"current_steps": 2569, "total_steps": 3906, "loss": 1.3347, "learning_rate": 1.0687829453629402e-05, "epoch": 0.657664, "percentage": 65.77, "elapsed_time": "9:26:32", "remaining_time": "4:54:51"}
|
2570 |
+
{"current_steps": 2570, "total_steps": 3906, "loss": 1.3377, "learning_rate": 1.0673449288454047e-05, "epoch": 0.65792, "percentage": 65.8, "elapsed_time": "9:26:46", "remaining_time": "4:54:37"}
|
2571 |
+
{"current_steps": 2571, "total_steps": 3906, "loss": 1.3255, "learning_rate": 1.0659075282092549e-05, "epoch": 0.658176, "percentage": 65.82, "elapsed_time": "9:26:59", "remaining_time": "4:54:24"}
|
2572 |
+
{"current_steps": 2572, "total_steps": 3906, "loss": 1.3528, "learning_rate": 1.064470744403682e-05, "epoch": 0.658432, "percentage": 65.85, "elapsed_time": "9:27:12", "remaining_time": "4:54:11"}
|
2573 |
+
{"current_steps": 2573, "total_steps": 3906, "loss": 1.3638, "learning_rate": 1.0630345783774703e-05, "epoch": 0.658688, "percentage": 65.87, "elapsed_time": "9:27:25", "remaining_time": "4:53:58"}
|
2574 |
+
{"current_steps": 2574, "total_steps": 3906, "loss": 1.3198, "learning_rate": 1.061599031078997e-05, "epoch": 0.658944, "percentage": 65.9, "elapsed_time": "9:27:38", "remaining_time": "4:53:44"}
|
2575 |
+
{"current_steps": 2575, "total_steps": 3906, "loss": 1.3598, "learning_rate": 1.0601641034562284e-05, "epoch": 0.6592, "percentage": 65.92, "elapsed_time": "9:27:52", "remaining_time": "4:53:31"}
|
2576 |
+
{"current_steps": 2576, "total_steps": 3906, "loss": 1.3647, "learning_rate": 1.0587297964567228e-05, "epoch": 0.659456, "percentage": 65.95, "elapsed_time": "9:28:05", "remaining_time": "4:53:18"}
|
2577 |
+
{"current_steps": 2577, "total_steps": 3906, "loss": 1.3661, "learning_rate": 1.0572961110276299e-05, "epoch": 0.659712, "percentage": 65.98, "elapsed_time": "9:28:18", "remaining_time": "4:53:05"}
|
2578 |
+
{"current_steps": 2578, "total_steps": 3906, "loss": 1.3853, "learning_rate": 1.055863048115687e-05, "epoch": 0.659968, "percentage": 66.0, "elapsed_time": "9:28:31", "remaining_time": "4:52:51"}
|
2579 |
+
{"current_steps": 2579, "total_steps": 3906, "loss": 1.3343, "learning_rate": 1.0544306086672216e-05, "epoch": 0.660224, "percentage": 66.03, "elapsed_time": "9:28:45", "remaining_time": "4:52:38"}
|
2580 |
+
{"current_steps": 2580, "total_steps": 3906, "loss": 1.3605, "learning_rate": 1.05299879362815e-05, "epoch": 0.66048, "percentage": 66.05, "elapsed_time": "9:28:58", "remaining_time": "4:52:25"}
|
2581 |
+
{"current_steps": 2581, "total_steps": 3906, "loss": 1.3275, "learning_rate": 1.0515676039439737e-05, "epoch": 0.660736, "percentage": 66.08, "elapsed_time": "9:29:11", "remaining_time": "4:52:12"}
|
2582 |
+
{"current_steps": 2582, "total_steps": 3906, "loss": 1.287, "learning_rate": 1.0501370405597834e-05, "epoch": 0.660992, "percentage": 66.1, "elapsed_time": "9:29:24", "remaining_time": "4:51:58"}
|
2583 |
+
{"current_steps": 2583, "total_steps": 3906, "loss": 1.3191, "learning_rate": 1.0487071044202559e-05, "epoch": 0.661248, "percentage": 66.13, "elapsed_time": "9:29:37", "remaining_time": "4:51:45"}
|
2584 |
+
{"current_steps": 2584, "total_steps": 3906, "loss": 1.3324, "learning_rate": 1.0472777964696533e-05, "epoch": 0.661504, "percentage": 66.15, "elapsed_time": "9:29:50", "remaining_time": "4:51:32"}
|
2585 |
+
{"current_steps": 2585, "total_steps": 3906, "loss": 1.3046, "learning_rate": 1.0458491176518238e-05, "epoch": 0.66176, "percentage": 66.18, "elapsed_time": "9:30:04", "remaining_time": "4:51:19"}
|
2586 |
+
{"current_steps": 2586, "total_steps": 3906, "loss": 1.3538, "learning_rate": 1.0444210689101982e-05, "epoch": 0.662016, "percentage": 66.21, "elapsed_time": "9:30:17", "remaining_time": "4:51:05"}
|
2587 |
+
{"current_steps": 2587, "total_steps": 3906, "loss": 1.364, "learning_rate": 1.0429936511877936e-05, "epoch": 0.662272, "percentage": 66.23, "elapsed_time": "9:30:30", "remaining_time": "4:50:52"}
|
2588 |
+
{"current_steps": 2588, "total_steps": 3906, "loss": 1.369, "learning_rate": 1.0415668654272084e-05, "epoch": 0.662528, "percentage": 66.26, "elapsed_time": "9:30:43", "remaining_time": "4:50:39"}
|
2589 |
+
{"current_steps": 2589, "total_steps": 3906, "loss": 1.3679, "learning_rate": 1.0401407125706248e-05, "epoch": 0.662784, "percentage": 66.28, "elapsed_time": "9:30:56", "remaining_time": "4:50:26"}
|
2590 |
+
{"current_steps": 2590, "total_steps": 3906, "loss": 1.3647, "learning_rate": 1.038715193559807e-05, "epoch": 0.66304, "percentage": 66.31, "elapsed_time": "9:31:10", "remaining_time": "4:50:12"}
|
2591 |
+
{"current_steps": 2591, "total_steps": 3906, "loss": 1.3758, "learning_rate": 1.0372903093361005e-05, "epoch": 0.663296, "percentage": 66.33, "elapsed_time": "9:31:23", "remaining_time": "4:49:59"}
|
2592 |
+
{"current_steps": 2592, "total_steps": 3906, "loss": 1.3602, "learning_rate": 1.0358660608404323e-05, "epoch": 0.663552, "percentage": 66.36, "elapsed_time": "9:31:36", "remaining_time": "4:49:46"}
|
2593 |
+
{"current_steps": 2593, "total_steps": 3906, "loss": 1.3277, "learning_rate": 1.0344424490133073e-05, "epoch": 0.663808, "percentage": 66.39, "elapsed_time": "9:31:49", "remaining_time": "4:49:33"}
|
2594 |
+
{"current_steps": 2594, "total_steps": 3906, "loss": 1.3443, "learning_rate": 1.0330194747948124e-05, "epoch": 0.664064, "percentage": 66.41, "elapsed_time": "9:32:02", "remaining_time": "4:49:19"}
|
2595 |
+
{"current_steps": 2595, "total_steps": 3906, "loss": 1.2863, "learning_rate": 1.0315971391246124e-05, "epoch": 0.66432, "percentage": 66.44, "elapsed_time": "9:32:16", "remaining_time": "4:49:06"}
|
2596 |
+
{"current_steps": 2596, "total_steps": 3906, "loss": 1.366, "learning_rate": 1.030175442941951e-05, "epoch": 0.664576, "percentage": 66.46, "elapsed_time": "9:32:29", "remaining_time": "4:48:53"}
|
2597 |
+
{"current_steps": 2597, "total_steps": 3906, "loss": 1.3282, "learning_rate": 1.02875438718565e-05, "epoch": 0.664832, "percentage": 66.49, "elapsed_time": "9:32:42", "remaining_time": "4:48:40"}
|
2598 |
+
{"current_steps": 2598, "total_steps": 3906, "loss": 1.3676, "learning_rate": 1.0273339727941059e-05, "epoch": 0.665088, "percentage": 66.51, "elapsed_time": "9:32:55", "remaining_time": "4:48:26"}
|
2599 |
+
{"current_steps": 2599, "total_steps": 3906, "loss": 1.3357, "learning_rate": 1.0259142007052945e-05, "epoch": 0.665344, "percentage": 66.54, "elapsed_time": "9:33:08", "remaining_time": "4:48:13"}
|
2600 |
+
{"current_steps": 2600, "total_steps": 3906, "loss": 1.3207, "learning_rate": 1.024495071856765e-05, "epoch": 0.6656, "percentage": 66.56, "elapsed_time": "9:33:22", "remaining_time": "4:48:00"}
|
2601 |
+
{"current_steps": 2601, "total_steps": 3906, "loss": 1.3836, "learning_rate": 1.023076587185644e-05, "epoch": 0.665856, "percentage": 66.59, "elapsed_time": "9:33:35", "remaining_time": "4:47:47"}
|
2602 |
+
{"current_steps": 2602, "total_steps": 3906, "loss": 1.3939, "learning_rate": 1.0216587476286319e-05, "epoch": 0.666112, "percentage": 66.62, "elapsed_time": "9:33:48", "remaining_time": "4:47:33"}
|
2603 |
+
{"current_steps": 2603, "total_steps": 3906, "loss": 1.3663, "learning_rate": 1.0202415541220025e-05, "epoch": 0.666368, "percentage": 66.64, "elapsed_time": "9:34:01", "remaining_time": "4:47:20"}
|
2604 |
+
{"current_steps": 2604, "total_steps": 3906, "loss": 1.3276, "learning_rate": 1.0188250076016042e-05, "epoch": 0.666624, "percentage": 66.67, "elapsed_time": "9:34:14", "remaining_time": "4:47:07"}
|
2605 |
+
{"current_steps": 2605, "total_steps": 3906, "loss": 1.3145, "learning_rate": 1.0174091090028577e-05, "epoch": 0.66688, "percentage": 66.69, "elapsed_time": "9:34:27", "remaining_time": "4:46:54"}
|
2606 |
+
{"current_steps": 2606, "total_steps": 3906, "loss": 1.3508, "learning_rate": 1.0159938592607543e-05, "epoch": 0.667136, "percentage": 66.72, "elapsed_time": "9:34:41", "remaining_time": "4:46:40"}
|
2607 |
+
{"current_steps": 2607, "total_steps": 3906, "loss": 1.3776, "learning_rate": 1.0145792593098589e-05, "epoch": 0.667392, "percentage": 66.74, "elapsed_time": "9:34:54", "remaining_time": "4:46:27"}
|
2608 |
+
{"current_steps": 2608, "total_steps": 3906, "loss": 1.3116, "learning_rate": 1.013165310084307e-05, "epoch": 0.667648, "percentage": 66.77, "elapsed_time": "9:35:07", "remaining_time": "4:46:14"}
|
2609 |
+
{"current_steps": 2609, "total_steps": 3906, "loss": 1.2854, "learning_rate": 1.0117520125178028e-05, "epoch": 0.667904, "percentage": 66.79, "elapsed_time": "9:35:20", "remaining_time": "4:46:01"}
|
2610 |
+
{"current_steps": 2610, "total_steps": 3906, "loss": 1.3395, "learning_rate": 1.010339367543622e-05, "epoch": 0.66816, "percentage": 66.82, "elapsed_time": "9:35:33", "remaining_time": "4:45:47"}
|
2611 |
+
{"current_steps": 2611, "total_steps": 3906, "loss": 1.3446, "learning_rate": 1.008927376094609e-05, "epoch": 0.668416, "percentage": 66.85, "elapsed_time": "9:35:47", "remaining_time": "4:45:34"}
|
2612 |
+
{"current_steps": 2612, "total_steps": 3906, "loss": 1.3465, "learning_rate": 1.0075160391031752e-05, "epoch": 0.668672, "percentage": 66.87, "elapsed_time": "9:36:00", "remaining_time": "4:45:21"}
|
2613 |
+
{"current_steps": 2613, "total_steps": 3906, "loss": 1.3687, "learning_rate": 1.0061053575013018e-05, "epoch": 0.668928, "percentage": 66.9, "elapsed_time": "9:36:13", "remaining_time": "4:45:08"}
|
2614 |
+
{"current_steps": 2614, "total_steps": 3906, "loss": 1.321, "learning_rate": 1.004695332220536e-05, "epoch": 0.669184, "percentage": 66.92, "elapsed_time": "9:36:26", "remaining_time": "4:44:54"}
|
2615 |
+
{"current_steps": 2615, "total_steps": 3906, "loss": 1.3528, "learning_rate": 1.0032859641919919e-05, "epoch": 0.66944, "percentage": 66.95, "elapsed_time": "9:36:39", "remaining_time": "4:44:41"}
|
2616 |
+
{"current_steps": 2616, "total_steps": 3906, "loss": 1.3051, "learning_rate": 1.00187725434635e-05, "epoch": 0.669696, "percentage": 66.97, "elapsed_time": "9:36:52", "remaining_time": "4:44:28"}
|
2617 |
+
{"current_steps": 2617, "total_steps": 3906, "loss": 1.3577, "learning_rate": 1.0004692036138563e-05, "epoch": 0.669952, "percentage": 67.0, "elapsed_time": "9:37:06", "remaining_time": "4:44:15"}
|
2618 |
+
{"current_steps": 2618, "total_steps": 3906, "loss": 1.3066, "learning_rate": 9.990618129243196e-06, "epoch": 0.670208, "percentage": 67.03, "elapsed_time": "9:37:19", "remaining_time": "4:44:01"}
|
2619 |
+
{"current_steps": 2619, "total_steps": 3906, "loss": 1.3609, "learning_rate": 9.976550832071147e-06, "epoch": 0.670464, "percentage": 67.05, "elapsed_time": "9:37:32", "remaining_time": "4:43:48"}
|
2620 |
+
{"current_steps": 2620, "total_steps": 3906, "loss": 1.3455, "learning_rate": 9.962490153911808e-06, "epoch": 0.67072, "percentage": 67.08, "elapsed_time": "9:37:45", "remaining_time": "4:43:35"}
|
2621 |
+
{"current_steps": 2621, "total_steps": 3906, "loss": 1.293, "learning_rate": 9.94843610405017e-06, "epoch": 0.670976, "percentage": 67.1, "elapsed_time": "9:37:58", "remaining_time": "4:43:22"}
|
2622 |
+
{"current_steps": 2622, "total_steps": 3906, "loss": 1.3299, "learning_rate": 9.934388691766871e-06, "epoch": 0.671232, "percentage": 67.13, "elapsed_time": "9:38:12", "remaining_time": "4:43:08"}
|
2623 |
+
{"current_steps": 2623, "total_steps": 3906, "loss": 1.3686, "learning_rate": 9.920347926338168e-06, "epoch": 0.671488, "percentage": 67.15, "elapsed_time": "9:38:25", "remaining_time": "4:42:55"}
|
2624 |
+
{"current_steps": 2624, "total_steps": 3906, "loss": 1.3142, "learning_rate": 9.906313817035901e-06, "epoch": 0.671744, "percentage": 67.18, "elapsed_time": "9:38:38", "remaining_time": "4:42:42"}
|
2625 |
+
{"current_steps": 2625, "total_steps": 3906, "loss": 1.3679, "learning_rate": 9.892286373127544e-06, "epoch": 0.672, "percentage": 67.2, "elapsed_time": "9:38:51", "remaining_time": "4:42:29"}
|
2626 |
+
{"current_steps": 2626, "total_steps": 3906, "loss": 1.3243, "learning_rate": 9.87826560387616e-06, "epoch": 0.672256, "percentage": 67.23, "elapsed_time": "9:39:04", "remaining_time": "4:42:15"}
|
2627 |
+
{"current_steps": 2627, "total_steps": 3906, "loss": 1.3056, "learning_rate": 9.8642515185404e-06, "epoch": 0.672512, "percentage": 67.26, "elapsed_time": "9:39:17", "remaining_time": "4:42:02"}
|
2628 |
+
{"current_steps": 2628, "total_steps": 3906, "loss": 1.3158, "learning_rate": 9.850244126374507e-06, "epoch": 0.672768, "percentage": 67.28, "elapsed_time": "9:39:31", "remaining_time": "4:41:49"}
|
2629 |
+
{"current_steps": 2629, "total_steps": 3906, "loss": 1.378, "learning_rate": 9.836243436628308e-06, "epoch": 0.673024, "percentage": 67.31, "elapsed_time": "9:39:44", "remaining_time": "4:41:35"}
|
2630 |
+
{"current_steps": 2630, "total_steps": 3906, "loss": 1.3186, "learning_rate": 9.822249458547181e-06, "epoch": 0.67328, "percentage": 67.33, "elapsed_time": "9:39:57", "remaining_time": "4:41:22"}
|
2631 |
+
{"current_steps": 2631, "total_steps": 3906, "loss": 1.3783, "learning_rate": 9.808262201372105e-06, "epoch": 0.673536, "percentage": 67.36, "elapsed_time": "9:40:10", "remaining_time": "4:41:09"}
|
2632 |
+
{"current_steps": 2632, "total_steps": 3906, "loss": 1.3345, "learning_rate": 9.794281674339592e-06, "epoch": 0.673792, "percentage": 67.38, "elapsed_time": "9:40:23", "remaining_time": "4:40:56"}
|
2633 |
+
{"current_steps": 2633, "total_steps": 3906, "loss": 1.3704, "learning_rate": 9.780307886681725e-06, "epoch": 0.674048, "percentage": 67.41, "elapsed_time": "9:40:37", "remaining_time": "4:40:43"}
|
2634 |
+
{"current_steps": 2634, "total_steps": 3906, "loss": 1.371, "learning_rate": 9.766340847626137e-06, "epoch": 0.674304, "percentage": 67.43, "elapsed_time": "9:40:50", "remaining_time": "4:40:29"}
|
2635 |
+
{"current_steps": 2635, "total_steps": 3906, "loss": 1.2962, "learning_rate": 9.752380566396009e-06, "epoch": 0.67456, "percentage": 67.46, "elapsed_time": "9:41:03", "remaining_time": "4:40:16"}
|
2636 |
+
{"current_steps": 2636, "total_steps": 3906, "loss": 1.284, "learning_rate": 9.738427052210034e-06, "epoch": 0.674816, "percentage": 67.49, "elapsed_time": "9:41:16", "remaining_time": "4:40:03"}
|
2637 |
+
{"current_steps": 2637, "total_steps": 3906, "loss": 1.3249, "learning_rate": 9.724480314282463e-06, "epoch": 0.675072, "percentage": 67.51, "elapsed_time": "9:41:29", "remaining_time": "4:39:49"}
|
2638 |
+
{"current_steps": 2638, "total_steps": 3906, "loss": 1.3601, "learning_rate": 9.710540361823063e-06, "epoch": 0.675328, "percentage": 67.54, "elapsed_time": "9:41:42", "remaining_time": "4:39:36"}
|
2639 |
+
{"current_steps": 2639, "total_steps": 3906, "loss": 1.3774, "learning_rate": 9.696607204037124e-06, "epoch": 0.675584, "percentage": 67.56, "elapsed_time": "9:41:56", "remaining_time": "4:39:23"}
|
2640 |
+
{"current_steps": 2640, "total_steps": 3906, "loss": 1.3419, "learning_rate": 9.68268085012544e-06, "epoch": 0.67584, "percentage": 67.59, "elapsed_time": "9:42:09", "remaining_time": "4:39:10"}
|
2641 |
+
{"current_steps": 2641, "total_steps": 3906, "loss": 1.3574, "learning_rate": 9.668761309284331e-06, "epoch": 0.676096, "percentage": 67.61, "elapsed_time": "9:42:22", "remaining_time": "4:38:56"}
|
2642 |
+
{"current_steps": 2642, "total_steps": 3906, "loss": 1.324, "learning_rate": 9.654848590705586e-06, "epoch": 0.676352, "percentage": 67.64, "elapsed_time": "9:42:35", "remaining_time": "4:38:43"}
|
2643 |
+
{"current_steps": 2643, "total_steps": 3906, "loss": 1.3318, "learning_rate": 9.640942703576523e-06, "epoch": 0.676608, "percentage": 67.67, "elapsed_time": "9:42:48", "remaining_time": "4:38:30"}
|
2644 |
+
{"current_steps": 2644, "total_steps": 3906, "loss": 1.3332, "learning_rate": 9.627043657079919e-06, "epoch": 0.676864, "percentage": 67.69, "elapsed_time": "9:43:02", "remaining_time": "4:38:17"}
|
2645 |
+
{"current_steps": 2645, "total_steps": 3906, "loss": 1.3698, "learning_rate": 9.613151460394056e-06, "epoch": 0.67712, "percentage": 67.72, "elapsed_time": "9:43:15", "remaining_time": "4:38:04"}
|
2646 |
+
{"current_steps": 2646, "total_steps": 3906, "loss": 1.3434, "learning_rate": 9.599266122692685e-06, "epoch": 0.677376, "percentage": 67.74, "elapsed_time": "9:43:28", "remaining_time": "4:37:50"}
|
2647 |
+
{"current_steps": 2647, "total_steps": 3906, "loss": 1.3355, "learning_rate": 9.585387653145035e-06, "epoch": 0.677632, "percentage": 67.77, "elapsed_time": "9:43:41", "remaining_time": "4:37:37"}
|
2648 |
+
{"current_steps": 2648, "total_steps": 3906, "loss": 1.36, "learning_rate": 9.571516060915777e-06, "epoch": 0.677888, "percentage": 67.79, "elapsed_time": "9:43:55", "remaining_time": "4:37:24"}
|
2649 |
+
{"current_steps": 2649, "total_steps": 3906, "loss": 1.3247, "learning_rate": 9.557651355165063e-06, "epoch": 0.678144, "percentage": 67.82, "elapsed_time": "9:44:08", "remaining_time": "4:37:11"}
|
2650 |
+
{"current_steps": 2650, "total_steps": 3906, "loss": 1.3149, "learning_rate": 9.543793545048492e-06, "epoch": 0.6784, "percentage": 67.84, "elapsed_time": "9:44:21", "remaining_time": "4:36:57"}
|
2651 |
+
{"current_steps": 2651, "total_steps": 3906, "loss": 1.3294, "learning_rate": 9.529942639717105e-06, "epoch": 0.678656, "percentage": 67.87, "elapsed_time": "9:44:34", "remaining_time": "4:36:44"}
|
2652 |
+
{"current_steps": 2652, "total_steps": 3906, "loss": 1.3414, "learning_rate": 9.516098648317386e-06, "epoch": 0.678912, "percentage": 67.9, "elapsed_time": "9:44:47", "remaining_time": "4:36:31"}
|
2653 |
+
{"current_steps": 2653, "total_steps": 3906, "loss": 1.3293, "learning_rate": 9.502261579991266e-06, "epoch": 0.679168, "percentage": 67.92, "elapsed_time": "9:45:00", "remaining_time": "4:36:17"}
|
2654 |
+
{"current_steps": 2654, "total_steps": 3906, "loss": 1.3151, "learning_rate": 9.488431443876076e-06, "epoch": 0.679424, "percentage": 67.95, "elapsed_time": "9:45:14", "remaining_time": "4:36:04"}
|
2655 |
+
{"current_steps": 2655, "total_steps": 3906, "loss": 1.3381, "learning_rate": 9.474608249104584e-06, "epoch": 0.67968, "percentage": 67.97, "elapsed_time": "9:45:27", "remaining_time": "4:35:51"}
|
2656 |
+
{"current_steps": 2656, "total_steps": 3906, "loss": 1.3672, "learning_rate": 9.460792004804981e-06, "epoch": 0.679936, "percentage": 68.0, "elapsed_time": "9:45:40", "remaining_time": "4:35:38"}
|
2657 |
+
{"current_steps": 2657, "total_steps": 3906, "loss": 1.3086, "learning_rate": 9.446982720100864e-06, "epoch": 0.680192, "percentage": 68.02, "elapsed_time": "9:45:53", "remaining_time": "4:35:25"}
|
2658 |
+
{"current_steps": 2658, "total_steps": 3906, "loss": 1.3204, "learning_rate": 9.433180404111228e-06, "epoch": 0.680448, "percentage": 68.05, "elapsed_time": "9:46:06", "remaining_time": "4:35:11"}
|
2659 |
+
{"current_steps": 2659, "total_steps": 3906, "loss": 1.3462, "learning_rate": 9.419385065950477e-06, "epoch": 0.680704, "percentage": 68.07, "elapsed_time": "9:46:20", "remaining_time": "4:34:58"}
|
2660 |
+
{"current_steps": 2660, "total_steps": 3906, "loss": 1.3254, "learning_rate": 9.405596714728404e-06, "epoch": 0.68096, "percentage": 68.1, "elapsed_time": "9:46:33", "remaining_time": "4:34:45"}
|
2661 |
+
{"current_steps": 2661, "total_steps": 3906, "loss": 1.3325, "learning_rate": 9.391815359550173e-06, "epoch": 0.681216, "percentage": 68.13, "elapsed_time": "9:46:46", "remaining_time": "4:34:31"}
|
2662 |
+
{"current_steps": 2662, "total_steps": 3906, "loss": 1.3154, "learning_rate": 9.37804100951635e-06, "epoch": 0.681472, "percentage": 68.15, "elapsed_time": "9:46:59", "remaining_time": "4:34:18"}
|
2663 |
+
{"current_steps": 2663, "total_steps": 3906, "loss": 1.341, "learning_rate": 9.36427367372286e-06, "epoch": 0.681728, "percentage": 68.18, "elapsed_time": "9:47:12", "remaining_time": "4:34:05"}
|
2664 |
+
{"current_steps": 2664, "total_steps": 3906, "loss": 1.3318, "learning_rate": 9.35051336126101e-06, "epoch": 0.681984, "percentage": 68.2, "elapsed_time": "9:47:25", "remaining_time": "4:33:52"}
|
2665 |
+
{"current_steps": 2665, "total_steps": 3906, "loss": 1.3181, "learning_rate": 9.336760081217452e-06, "epoch": 0.68224, "percentage": 68.23, "elapsed_time": "9:47:39", "remaining_time": "4:33:38"}
|
2666 |
+
{"current_steps": 2666, "total_steps": 3906, "loss": 1.3712, "learning_rate": 9.323013842674212e-06, "epoch": 0.682496, "percentage": 68.25, "elapsed_time": "9:47:52", "remaining_time": "4:33:25"}
|
2667 |
+
{"current_steps": 2667, "total_steps": 3906, "loss": 1.3694, "learning_rate": 9.309274654708643e-06, "epoch": 0.682752, "percentage": 68.28, "elapsed_time": "9:48:05", "remaining_time": "4:33:12"}
|
2668 |
+
{"current_steps": 2668, "total_steps": 3906, "loss": 1.3163, "learning_rate": 9.295542526393464e-06, "epoch": 0.683008, "percentage": 68.31, "elapsed_time": "9:48:18", "remaining_time": "4:32:59"}
|
2669 |
+
{"current_steps": 2669, "total_steps": 3906, "loss": 1.3465, "learning_rate": 9.281817466796728e-06, "epoch": 0.683264, "percentage": 68.33, "elapsed_time": "9:48:31", "remaining_time": "4:32:45"}
{"current_steps": 2670, "total_steps": 3906, "loss": 1.3255, "learning_rate": 9.26809948498181e-06, "epoch": 0.68352, "percentage": 68.36, "elapsed_time": "9:48:45", "remaining_time": "4:32:32"}
{"current_steps": 2671, "total_steps": 3906, "loss": 1.3433, "learning_rate": 9.254388590007424e-06, "epoch": 0.683776, "percentage": 68.38, "elapsed_time": "9:48:58", "remaining_time": "4:32:19"}
{"current_steps": 2672, "total_steps": 3906, "loss": 1.3324, "learning_rate": 9.240684790927598e-06, "epoch": 0.684032, "percentage": 68.41, "elapsed_time": "9:49:11", "remaining_time": "4:32:06"}
{"current_steps": 2673, "total_steps": 3906, "loss": 1.3217, "learning_rate": 9.22698809679167e-06, "epoch": 0.684288, "percentage": 68.43, "elapsed_time": "9:49:24", "remaining_time": "4:31:52"}
{"current_steps": 2674, "total_steps": 3906, "loss": 1.2957, "learning_rate": 9.213298516644287e-06, "epoch": 0.684544, "percentage": 68.46, "elapsed_time": "9:49:37", "remaining_time": "4:31:39"}
{"current_steps": 2675, "total_steps": 3906, "loss": 1.3526, "learning_rate": 9.199616059525404e-06, "epoch": 0.6848, "percentage": 68.48, "elapsed_time": "9:49:50", "remaining_time": "4:31:26"}
{"current_steps": 2676, "total_steps": 3906, "loss": 1.3235, "learning_rate": 9.185940734470282e-06, "epoch": 0.685056, "percentage": 68.51, "elapsed_time": "9:50:04", "remaining_time": "4:31:13"}
{"current_steps": 2677, "total_steps": 3906, "loss": 1.3477, "learning_rate": 9.172272550509442e-06, "epoch": 0.685312, "percentage": 68.54, "elapsed_time": "9:50:17", "remaining_time": "4:30:59"}
{"current_steps": 2678, "total_steps": 3906, "loss": 1.3157, "learning_rate": 9.158611516668723e-06, "epoch": 0.685568, "percentage": 68.56, "elapsed_time": "9:50:30", "remaining_time": "4:30:46"}
{"current_steps": 2679, "total_steps": 3906, "loss": 1.3927, "learning_rate": 9.144957641969212e-06, "epoch": 0.685824, "percentage": 68.59, "elapsed_time": "9:50:43", "remaining_time": "4:30:33"}
{"current_steps": 2680, "total_steps": 3906, "loss": 1.3106, "learning_rate": 9.13131093542729e-06, "epoch": 0.68608, "percentage": 68.61, "elapsed_time": "9:50:57", "remaining_time": "4:30:20"}
{"current_steps": 2681, "total_steps": 3906, "loss": 1.3176, "learning_rate": 9.117671406054598e-06, "epoch": 0.686336, "percentage": 68.64, "elapsed_time": "9:51:10", "remaining_time": "4:30:07"}
{"current_steps": 2682, "total_steps": 3906, "loss": 1.3247, "learning_rate": 9.104039062858038e-06, "epoch": 0.686592, "percentage": 68.66, "elapsed_time": "9:51:23", "remaining_time": "4:29:53"}
{"current_steps": 2683, "total_steps": 3906, "loss": 1.3311, "learning_rate": 9.090413914839767e-06, "epoch": 0.686848, "percentage": 68.69, "elapsed_time": "9:51:36", "remaining_time": "4:29:40"}
{"current_steps": 2684, "total_steps": 3906, "loss": 1.315, "learning_rate": 9.076795970997194e-06, "epoch": 0.687104, "percentage": 68.71, "elapsed_time": "9:51:49", "remaining_time": "4:29:27"}
{"current_steps": 2685, "total_steps": 3906, "loss": 1.3312, "learning_rate": 9.063185240322956e-06, "epoch": 0.68736, "percentage": 68.74, "elapsed_time": "9:52:02", "remaining_time": "4:29:13"}
{"current_steps": 2686, "total_steps": 3906, "loss": 1.346, "learning_rate": 9.04958173180494e-06, "epoch": 0.687616, "percentage": 68.77, "elapsed_time": "9:52:16", "remaining_time": "4:29:00"}
{"current_steps": 2687, "total_steps": 3906, "loss": 1.3504, "learning_rate": 9.03598545442627e-06, "epoch": 0.687872, "percentage": 68.79, "elapsed_time": "9:52:29", "remaining_time": "4:28:47"}
{"current_steps": 2688, "total_steps": 3906, "loss": 1.3406, "learning_rate": 9.02239641716527e-06, "epoch": 0.688128, "percentage": 68.82, "elapsed_time": "9:52:42", "remaining_time": "4:28:34"}
{"current_steps": 2689, "total_steps": 3906, "loss": 1.3704, "learning_rate": 9.00881462899551e-06, "epoch": 0.688384, "percentage": 68.84, "elapsed_time": "9:52:55", "remaining_time": "4:28:20"}
{"current_steps": 2690, "total_steps": 3906, "loss": 1.3604, "learning_rate": 8.995240098885765e-06, "epoch": 0.68864, "percentage": 68.87, "elapsed_time": "9:53:08", "remaining_time": "4:28:07"}
{"current_steps": 2691, "total_steps": 3906, "loss": 1.3806, "learning_rate": 8.981672835800002e-06, "epoch": 0.688896, "percentage": 68.89, "elapsed_time": "9:53:21", "remaining_time": "4:27:54"}
{"current_steps": 2692, "total_steps": 3906, "loss": 1.3537, "learning_rate": 8.968112848697405e-06, "epoch": 0.689152, "percentage": 68.92, "elapsed_time": "9:53:35", "remaining_time": "4:27:41"}
{"current_steps": 2693, "total_steps": 3906, "loss": 1.3458, "learning_rate": 8.954560146532352e-06, "epoch": 0.689408, "percentage": 68.95, "elapsed_time": "9:53:48", "remaining_time": "4:27:27"}
{"current_steps": 2694, "total_steps": 3906, "loss": 1.3123, "learning_rate": 8.941014738254408e-06, "epoch": 0.689664, "percentage": 68.97, "elapsed_time": "9:54:01", "remaining_time": "4:27:14"}
{"current_steps": 2695, "total_steps": 3906, "loss": 1.3119, "learning_rate": 8.927476632808321e-06, "epoch": 0.68992, "percentage": 69.0, "elapsed_time": "9:54:14", "remaining_time": "4:27:01"}
{"current_steps": 2696, "total_steps": 3906, "loss": 1.3607, "learning_rate": 8.913945839134024e-06, "epoch": 0.690176, "percentage": 69.02, "elapsed_time": "9:54:27", "remaining_time": "4:26:48"}
{"current_steps": 2697, "total_steps": 3906, "loss": 1.3173, "learning_rate": 8.900422366166597e-06, "epoch": 0.690432, "percentage": 69.05, "elapsed_time": "9:54:41", "remaining_time": "4:26:34"}
{"current_steps": 2698, "total_steps": 3906, "loss": 1.3453, "learning_rate": 8.886906222836316e-06, "epoch": 0.690688, "percentage": 69.07, "elapsed_time": "9:54:54", "remaining_time": "4:26:21"}
{"current_steps": 2699, "total_steps": 3906, "loss": 1.3717, "learning_rate": 8.873397418068607e-06, "epoch": 0.690944, "percentage": 69.1, "elapsed_time": "9:55:07", "remaining_time": "4:26:08"}
{"current_steps": 2700, "total_steps": 3906, "loss": 1.3662, "learning_rate": 8.859895960784035e-06, "epoch": 0.6912, "percentage": 69.12, "elapsed_time": "9:55:20", "remaining_time": "4:25:55"}
{"current_steps": 2701, "total_steps": 3906, "loss": 1.387, "learning_rate": 8.84640185989833e-06, "epoch": 0.691456, "percentage": 69.15, "elapsed_time": "9:55:33", "remaining_time": "4:25:41"}
{"current_steps": 2702, "total_steps": 3906, "loss": 1.3494, "learning_rate": 8.832915124322367e-06, "epoch": 0.691712, "percentage": 69.18, "elapsed_time": "9:55:47", "remaining_time": "4:25:28"}
{"current_steps": 2703, "total_steps": 3906, "loss": 1.3331, "learning_rate": 8.819435762962139e-06, "epoch": 0.691968, "percentage": 69.2, "elapsed_time": "9:56:00", "remaining_time": "4:25:15"}
{"current_steps": 2704, "total_steps": 3906, "loss": 1.3326, "learning_rate": 8.805963784718784e-06, "epoch": 0.692224, "percentage": 69.23, "elapsed_time": "9:56:13", "remaining_time": "4:25:02"}
{"current_steps": 2705, "total_steps": 3906, "loss": 1.298, "learning_rate": 8.792499198488558e-06, "epoch": 0.69248, "percentage": 69.25, "elapsed_time": "9:56:26", "remaining_time": "4:24:48"}
{"current_steps": 2706, "total_steps": 3906, "loss": 1.3582, "learning_rate": 8.77904201316284e-06, "epoch": 0.692736, "percentage": 69.28, "elapsed_time": "9:56:39", "remaining_time": "4:24:35"}
{"current_steps": 2707, "total_steps": 3906, "loss": 1.3801, "learning_rate": 8.765592237628122e-06, "epoch": 0.692992, "percentage": 69.3, "elapsed_time": "9:56:52", "remaining_time": "4:24:22"}
{"current_steps": 2708, "total_steps": 3906, "loss": 1.3029, "learning_rate": 8.752149880766008e-06, "epoch": 0.693248, "percentage": 69.33, "elapsed_time": "9:57:05", "remaining_time": "4:24:09"}
{"current_steps": 2709, "total_steps": 3906, "loss": 1.3265, "learning_rate": 8.73871495145318e-06, "epoch": 0.693504, "percentage": 69.35, "elapsed_time": "9:57:19", "remaining_time": "4:23:55"}
{"current_steps": 2710, "total_steps": 3906, "loss": 1.3093, "learning_rate": 8.725287458561447e-06, "epoch": 0.69376, "percentage": 69.38, "elapsed_time": "9:57:32", "remaining_time": "4:23:42"}
{"current_steps": 2711, "total_steps": 3906, "loss": 1.3291, "learning_rate": 8.711867410957675e-06, "epoch": 0.694016, "percentage": 69.41, "elapsed_time": "9:57:45", "remaining_time": "4:23:29"}
{"current_steps": 2712, "total_steps": 3906, "loss": 1.3255, "learning_rate": 8.698454817503838e-06, "epoch": 0.694272, "percentage": 69.43, "elapsed_time": "9:57:58", "remaining_time": "4:23:16"}
{"current_steps": 2713, "total_steps": 3906, "loss": 1.344, "learning_rate": 8.685049687056978e-06, "epoch": 0.694528, "percentage": 69.46, "elapsed_time": "9:58:11", "remaining_time": "4:23:02"}
{"current_steps": 2714, "total_steps": 3906, "loss": 1.3085, "learning_rate": 8.671652028469224e-06, "epoch": 0.694784, "percentage": 69.48, "elapsed_time": "9:58:25", "remaining_time": "4:22:49"}
{"current_steps": 2715, "total_steps": 3906, "loss": 1.3275, "learning_rate": 8.658261850587734e-06, "epoch": 0.69504, "percentage": 69.51, "elapsed_time": "9:58:38", "remaining_time": "4:22:36"}
{"current_steps": 2716, "total_steps": 3906, "loss": 1.3634, "learning_rate": 8.644879162254765e-06, "epoch": 0.695296, "percentage": 69.53, "elapsed_time": "9:58:51", "remaining_time": "4:22:23"}
{"current_steps": 2717, "total_steps": 3906, "loss": 1.3154, "learning_rate": 8.631503972307606e-06, "epoch": 0.695552, "percentage": 69.56, "elapsed_time": "9:59:04", "remaining_time": "4:22:09"}
{"current_steps": 2718, "total_steps": 3906, "loss": 1.3653, "learning_rate": 8.618136289578604e-06, "epoch": 0.695808, "percentage": 69.59, "elapsed_time": "9:59:17", "remaining_time": "4:21:56"}
{"current_steps": 2719, "total_steps": 3906, "loss": 1.3349, "learning_rate": 8.604776122895147e-06, "epoch": 0.696064, "percentage": 69.61, "elapsed_time": "9:59:31", "remaining_time": "4:21:43"}
{"current_steps": 2720, "total_steps": 3906, "loss": 1.3454, "learning_rate": 8.591423481079664e-06, "epoch": 0.69632, "percentage": 69.64, "elapsed_time": "9:59:44", "remaining_time": "4:21:30"}
{"current_steps": 2721, "total_steps": 3906, "loss": 1.3406, "learning_rate": 8.578078372949594e-06, "epoch": 0.696576, "percentage": 69.66, "elapsed_time": "9:59:57", "remaining_time": "4:21:16"}
{"current_steps": 2722, "total_steps": 3906, "loss": 1.3553, "learning_rate": 8.564740807317433e-06, "epoch": 0.696832, "percentage": 69.69, "elapsed_time": "10:00:10", "remaining_time": "4:21:03"}
{"current_steps": 2723, "total_steps": 3906, "loss": 1.3306, "learning_rate": 8.551410792990664e-06, "epoch": 0.697088, "percentage": 69.71, "elapsed_time": "10:00:23", "remaining_time": "4:20:50"}
{"current_steps": 2724, "total_steps": 3906, "loss": 1.3406, "learning_rate": 8.538088338771806e-06, "epoch": 0.697344, "percentage": 69.74, "elapsed_time": "10:00:37", "remaining_time": "4:20:37"}
{"current_steps": 2725, "total_steps": 3906, "loss": 1.3355, "learning_rate": 8.524773453458381e-06, "epoch": 0.6976, "percentage": 69.76, "elapsed_time": "10:00:50", "remaining_time": "4:20:23"}
{"current_steps": 2726, "total_steps": 3906, "loss": 1.3769, "learning_rate": 8.511466145842909e-06, "epoch": 0.697856, "percentage": 69.79, "elapsed_time": "10:01:03", "remaining_time": "4:20:10"}
{"current_steps": 2727, "total_steps": 3906, "loss": 1.3475, "learning_rate": 8.498166424712914e-06, "epoch": 0.698112, "percentage": 69.82, "elapsed_time": "10:01:16", "remaining_time": "4:19:57"}
{"current_steps": 2728, "total_steps": 3906, "loss": 1.2943, "learning_rate": 8.484874298850894e-06, "epoch": 0.698368, "percentage": 69.84, "elapsed_time": "10:01:29", "remaining_time": "4:19:44"}
{"current_steps": 2729, "total_steps": 3906, "loss": 1.3152, "learning_rate": 8.471589777034343e-06, "epoch": 0.698624, "percentage": 69.87, "elapsed_time": "10:01:42", "remaining_time": "4:19:30"}
{"current_steps": 2730, "total_steps": 3906, "loss": 1.3224, "learning_rate": 8.458312868035739e-06, "epoch": 0.69888, "percentage": 69.89, "elapsed_time": "10:01:56", "remaining_time": "4:19:17"}
{"current_steps": 2731, "total_steps": 3906, "loss": 1.3244, "learning_rate": 8.445043580622527e-06, "epoch": 0.699136, "percentage": 69.92, "elapsed_time": "10:02:09", "remaining_time": "4:19:04"}
{"current_steps": 2732, "total_steps": 3906, "loss": 1.3301, "learning_rate": 8.431781923557105e-06, "epoch": 0.699392, "percentage": 69.94, "elapsed_time": "10:02:22", "remaining_time": "4:18:51"}
{"current_steps": 2733, "total_steps": 3906, "loss": 1.3515, "learning_rate": 8.418527905596862e-06, "epoch": 0.699648, "percentage": 69.97, "elapsed_time": "10:02:35", "remaining_time": "4:18:37"}
{"current_steps": 2734, "total_steps": 3906, "loss": 1.3714, "learning_rate": 8.40528153549411e-06, "epoch": 0.699904, "percentage": 69.99, "elapsed_time": "10:02:48", "remaining_time": "4:18:24"}
{"current_steps": 2735, "total_steps": 3906, "loss": 1.3389, "learning_rate": 8.392042821996134e-06, "epoch": 0.70016, "percentage": 70.02, "elapsed_time": "10:03:02", "remaining_time": "4:18:11"}
{"current_steps": 2736, "total_steps": 3906, "loss": 1.3382, "learning_rate": 8.378811773845155e-06, "epoch": 0.700416, "percentage": 70.05, "elapsed_time": "10:03:15", "remaining_time": "4:17:58"}
{"current_steps": 2737, "total_steps": 3906, "loss": 1.351, "learning_rate": 8.365588399778333e-06, "epoch": 0.700672, "percentage": 70.07, "elapsed_time": "10:03:28", "remaining_time": "4:17:44"}
{"current_steps": 2738, "total_steps": 3906, "loss": 1.3369, "learning_rate": 8.352372708527758e-06, "epoch": 0.700928, "percentage": 70.1, "elapsed_time": "10:03:41", "remaining_time": "4:17:31"}
{"current_steps": 2739, "total_steps": 3906, "loss": 1.3207, "learning_rate": 8.33916470882046e-06, "epoch": 0.701184, "percentage": 70.12, "elapsed_time": "10:03:54", "remaining_time": "4:17:18"}
{"current_steps": 2740, "total_steps": 3906, "loss": 1.3244, "learning_rate": 8.325964409378362e-06, "epoch": 0.70144, "percentage": 70.15, "elapsed_time": "10:04:07", "remaining_time": "4:17:05"}
{"current_steps": 2741, "total_steps": 3906, "loss": 1.3366, "learning_rate": 8.312771818918326e-06, "epoch": 0.701696, "percentage": 70.17, "elapsed_time": "10:04:20", "remaining_time": "4:16:51"}
{"current_steps": 2742, "total_steps": 3906, "loss": 1.326, "learning_rate": 8.299586946152117e-06, "epoch": 0.701952, "percentage": 70.2, "elapsed_time": "10:04:34", "remaining_time": "4:16:38"}
{"current_steps": 2743, "total_steps": 3906, "loss": 1.3507, "learning_rate": 8.286409799786409e-06, "epoch": 0.702208, "percentage": 70.23, "elapsed_time": "10:04:47", "remaining_time": "4:16:25"}
{"current_steps": 2744, "total_steps": 3906, "loss": 1.3353, "learning_rate": 8.273240388522749e-06, "epoch": 0.702464, "percentage": 70.25, "elapsed_time": "10:05:00", "remaining_time": "4:16:12"}
{"current_steps": 2745, "total_steps": 3906, "loss": 1.3441, "learning_rate": 8.260078721057615e-06, "epoch": 0.70272, "percentage": 70.28, "elapsed_time": "10:05:13", "remaining_time": "4:15:58"}
{"current_steps": 2746, "total_steps": 3906, "loss": 1.363, "learning_rate": 8.246924806082338e-06, "epoch": 0.702976, "percentage": 70.3, "elapsed_time": "10:05:26", "remaining_time": "4:15:45"}
{"current_steps": 2747, "total_steps": 3906, "loss": 1.3252, "learning_rate": 8.233778652283143e-06, "epoch": 0.703232, "percentage": 70.33, "elapsed_time": "10:05:40", "remaining_time": "4:15:32"}
{"current_steps": 2748, "total_steps": 3906, "loss": 1.341, "learning_rate": 8.220640268341132e-06, "epoch": 0.703488, "percentage": 70.35, "elapsed_time": "10:05:53", "remaining_time": "4:15:19"}
{"current_steps": 2749, "total_steps": 3906, "loss": 1.3187, "learning_rate": 8.207509662932276e-06, "epoch": 0.703744, "percentage": 70.38, "elapsed_time": "10:06:06", "remaining_time": "4:15:05"}
{"current_steps": 2750, "total_steps": 3906, "loss": 1.3236, "learning_rate": 8.194386844727402e-06, "epoch": 0.704, "percentage": 70.4, "elapsed_time": "10:06:19", "remaining_time": "4:14:52"}
{"current_steps": 2751, "total_steps": 3906, "loss": 1.3664, "learning_rate": 8.181271822392213e-06, "epoch": 0.704256, "percentage": 70.43, "elapsed_time": "10:06:32", "remaining_time": "4:14:39"}
{"current_steps": 2752, "total_steps": 3906, "loss": 1.3405, "learning_rate": 8.168164604587234e-06, "epoch": 0.704512, "percentage": 70.46, "elapsed_time": "10:06:45", "remaining_time": "4:14:26"}
{"current_steps": 2753, "total_steps": 3906, "loss": 1.3646, "learning_rate": 8.155065199967857e-06, "epoch": 0.704768, "percentage": 70.48, "elapsed_time": "10:06:59", "remaining_time": "4:14:12"}
{"current_steps": 2754, "total_steps": 3906, "loss": 1.3334, "learning_rate": 8.141973617184322e-06, "epoch": 0.705024, "percentage": 70.51, "elapsed_time": "10:07:12", "remaining_time": "4:13:59"}
{"current_steps": 2755, "total_steps": 3906, "loss": 1.3118, "learning_rate": 8.128889864881676e-06, "epoch": 0.70528, "percentage": 70.53, "elapsed_time": "10:07:25", "remaining_time": "4:13:46"}
{"current_steps": 2756, "total_steps": 3906, "loss": 1.3489, "learning_rate": 8.115813951699818e-06, "epoch": 0.705536, "percentage": 70.56, "elapsed_time": "10:07:38", "remaining_time": "4:13:33"}
{"current_steps": 2757, "total_steps": 3906, "loss": 1.2909, "learning_rate": 8.102745886273472e-06, "epoch": 0.705792, "percentage": 70.58, "elapsed_time": "10:07:51", "remaining_time": "4:13:19"}
{"current_steps": 2758, "total_steps": 3906, "loss": 1.3154, "learning_rate": 8.089685677232158e-06, "epoch": 0.706048, "percentage": 70.61, "elapsed_time": "10:08:05", "remaining_time": "4:13:06"}
{"current_steps": 2759, "total_steps": 3906, "loss": 1.329, "learning_rate": 8.076633333200225e-06, "epoch": 0.706304, "percentage": 70.63, "elapsed_time": "10:08:18", "remaining_time": "4:12:53"}
{"current_steps": 2760, "total_steps": 3906, "loss": 1.3412, "learning_rate": 8.063588862796832e-06, "epoch": 0.70656, "percentage": 70.66, "elapsed_time": "10:08:31", "remaining_time": "4:12:40"}
{"current_steps": 2761, "total_steps": 3906, "loss": 1.3435, "learning_rate": 8.050552274635923e-06, "epoch": 0.706816, "percentage": 70.69, "elapsed_time": "10:08:44", "remaining_time": "4:12:26"}
{"current_steps": 2762, "total_steps": 3906, "loss": 1.3233, "learning_rate": 8.037523577326254e-06, "epoch": 0.707072, "percentage": 70.71, "elapsed_time": "10:08:57", "remaining_time": "4:12:13"}
{"current_steps": 2763, "total_steps": 3906, "loss": 1.3626, "learning_rate": 8.024502779471361e-06, "epoch": 0.707328, "percentage": 70.74, "elapsed_time": "10:09:11", "remaining_time": "4:12:00"}
{"current_steps": 2764, "total_steps": 3906, "loss": 1.3351, "learning_rate": 8.011489889669554e-06, "epoch": 0.707584, "percentage": 70.76, "elapsed_time": "10:09:24", "remaining_time": "4:11:47"}
{"current_steps": 2765, "total_steps": 3906, "loss": 1.3004, "learning_rate": 7.99848491651394e-06, "epoch": 0.70784, "percentage": 70.79, "elapsed_time": "10:09:37", "remaining_time": "4:11:33"}
{"current_steps": 2766, "total_steps": 3906, "loss": 1.3152, "learning_rate": 7.985487868592392e-06, "epoch": 0.708096, "percentage": 70.81, "elapsed_time": "10:09:50", "remaining_time": "4:11:20"}
{"current_steps": 2767, "total_steps": 3906, "loss": 1.301, "learning_rate": 7.972498754487537e-06, "epoch": 0.708352, "percentage": 70.84, "elapsed_time": "10:10:03", "remaining_time": "4:11:07"}
{"current_steps": 2768, "total_steps": 3906, "loss": 1.3318, "learning_rate": 7.959517582776776e-06, "epoch": 0.708608, "percentage": 70.87, "elapsed_time": "10:10:16", "remaining_time": "4:10:54"}
{"current_steps": 2769, "total_steps": 3906, "loss": 1.3411, "learning_rate": 7.946544362032274e-06, "epoch": 0.708864, "percentage": 70.89, "elapsed_time": "10:10:30", "remaining_time": "4:10:40"}
{"current_steps": 2770, "total_steps": 3906, "loss": 1.3805, "learning_rate": 7.933579100820914e-06, "epoch": 0.70912, "percentage": 70.92, "elapsed_time": "10:10:43", "remaining_time": "4:10:27"}
{"current_steps": 2771, "total_steps": 3906, "loss": 1.3198, "learning_rate": 7.920621807704355e-06, "epoch": 0.709376, "percentage": 70.94, "elapsed_time": "10:10:56", "remaining_time": "4:10:14"}
{"current_steps": 2772, "total_steps": 3906, "loss": 1.3438, "learning_rate": 7.907672491238976e-06, "epoch": 0.709632, "percentage": 70.97, "elapsed_time": "10:11:09", "remaining_time": "4:10:01"}
{"current_steps": 2773, "total_steps": 3906, "loss": 1.3276, "learning_rate": 7.894731159975896e-06, "epoch": 0.709888, "percentage": 70.99, "elapsed_time": "10:11:22", "remaining_time": "4:09:47"}
{"current_steps": 2774, "total_steps": 3906, "loss": 1.3291, "learning_rate": 7.881797822460965e-06, "epoch": 0.710144, "percentage": 71.02, "elapsed_time": "10:11:36", "remaining_time": "4:09:34"}
{"current_steps": 2775, "total_steps": 3906, "loss": 1.3218, "learning_rate": 7.86887248723475e-06, "epoch": 0.7104, "percentage": 71.04, "elapsed_time": "10:11:49", "remaining_time": "4:09:21"}
{"current_steps": 2776, "total_steps": 3906, "loss": 1.3594, "learning_rate": 7.855955162832519e-06, "epoch": 0.710656, "percentage": 71.07, "elapsed_time": "10:12:02", "remaining_time": "4:09:08"}
{"current_steps": 2777, "total_steps": 3906, "loss": 1.3595, "learning_rate": 7.843045857784278e-06, "epoch": 0.710912, "percentage": 71.1, "elapsed_time": "10:12:15", "remaining_time": "4:08:55"}
{"current_steps": 2778, "total_steps": 3906, "loss": 1.3338, "learning_rate": 7.830144580614709e-06, "epoch": 0.711168, "percentage": 71.12, "elapsed_time": "10:12:28", "remaining_time": "4:08:41"}
{"current_steps": 2779, "total_steps": 3906, "loss": 1.2991, "learning_rate": 7.817251339843213e-06, "epoch": 0.711424, "percentage": 71.15, "elapsed_time": "10:12:42", "remaining_time": "4:08:28"}
{"current_steps": 2780, "total_steps": 3906, "loss": 1.358, "learning_rate": 7.804366143983878e-06, "epoch": 0.71168, "percentage": 71.17, "elapsed_time": "10:12:55", "remaining_time": "4:08:15"}
{"current_steps": 2781, "total_steps": 3906, "loss": 1.3379, "learning_rate": 7.791489001545483e-06, "epoch": 0.711936, "percentage": 71.2, "elapsed_time": "10:13:08", "remaining_time": "4:08:02"}
{"current_steps": 2782, "total_steps": 3906, "loss": 1.3059, "learning_rate": 7.778619921031476e-06, "epoch": 0.712192, "percentage": 71.22, "elapsed_time": "10:13:21", "remaining_time": "4:07:48"}
{"current_steps": 2783, "total_steps": 3906, "loss": 1.3244, "learning_rate": 7.765758910939995e-06, "epoch": 0.712448, "percentage": 71.25, "elapsed_time": "10:13:34", "remaining_time": "4:07:35"}
{"current_steps": 2784, "total_steps": 3906, "loss": 1.3299, "learning_rate": 7.752905979763846e-06, "epoch": 0.712704, "percentage": 71.27, "elapsed_time": "10:13:47", "remaining_time": "4:07:22"}
{"current_steps": 2785, "total_steps": 3906, "loss": 1.3683, "learning_rate": 7.740061135990493e-06, "epoch": 0.71296, "percentage": 71.3, "elapsed_time": "10:14:01", "remaining_time": "4:07:09"}
{"current_steps": 2786, "total_steps": 3906, "loss": 1.3565, "learning_rate": 7.727224388102069e-06, "epoch": 0.713216, "percentage": 71.33, "elapsed_time": "10:14:14", "remaining_time": "4:06:55"}
{"current_steps": 2787, "total_steps": 3906, "loss": 1.3046, "learning_rate": 7.714395744575362e-06, "epoch": 0.713472, "percentage": 71.35, "elapsed_time": "10:14:27", "remaining_time": "4:06:42"}
{"current_steps": 2788, "total_steps": 3906, "loss": 1.3341, "learning_rate": 7.701575213881788e-06, "epoch": 0.713728, "percentage": 71.38, "elapsed_time": "10:14:40", "remaining_time": "4:06:29"}
{"current_steps": 2789, "total_steps": 3906, "loss": 1.3023, "learning_rate": 7.688762804487437e-06, "epoch": 0.713984, "percentage": 71.4, "elapsed_time": "10:14:53", "remaining_time": "4:06:16"}
{"current_steps": 2790, "total_steps": 3906, "loss": 1.3017, "learning_rate": 7.675958524853003e-06, "epoch": 0.71424, "percentage": 71.43, "elapsed_time": "10:15:07", "remaining_time": "4:06:02"}
{"current_steps": 2791, "total_steps": 3906, "loss": 1.3743, "learning_rate": 7.663162383433834e-06, "epoch": 0.714496, "percentage": 71.45, "elapsed_time": "10:15:20", "remaining_time": "4:05:49"}
{"current_steps": 2792, "total_steps": 3906, "loss": 1.3718, "learning_rate": 7.6503743886799e-06, "epoch": 0.714752, "percentage": 71.48, "elapsed_time": "10:15:33", "remaining_time": "4:05:36"}
{"current_steps": 2793, "total_steps": 3906, "loss": 1.3181, "learning_rate": 7.637594549035787e-06, "epoch": 0.715008, "percentage": 71.51, "elapsed_time": "10:15:46", "remaining_time": "4:05:23"}
{"current_steps": 2794, "total_steps": 3906, "loss": 1.3074, "learning_rate": 7.624822872940707e-06, "epoch": 0.715264, "percentage": 71.53, "elapsed_time": "10:15:59", "remaining_time": "4:05:09"}
{"current_steps": 2795, "total_steps": 3906, "loss": 1.3287, "learning_rate": 7.612059368828457e-06, "epoch": 0.71552, "percentage": 71.56, "elapsed_time": "10:16:12", "remaining_time": "4:04:56"}
{"current_steps": 2796, "total_steps": 3906, "loss": 1.3188, "learning_rate": 7.59930404512746e-06, "epoch": 0.715776, "percentage": 71.58, "elapsed_time": "10:16:25", "remaining_time": "4:04:43"}
{"current_steps": 2797, "total_steps": 3906, "loss": 1.3312, "learning_rate": 7.5865569102607295e-06, "epoch": 0.716032, "percentage": 71.61, "elapsed_time": "10:16:39", "remaining_time": "4:04:30"}
{"current_steps": 2798, "total_steps": 3906, "loss": 1.326, "learning_rate": 7.573817972645872e-06, "epoch": 0.716288, "percentage": 71.63, "elapsed_time": "10:16:52", "remaining_time": "4:04:16"}
{"current_steps": 2799, "total_steps": 3906, "loss": 1.3233, "learning_rate": 7.561087240695086e-06, "epoch": 0.716544, "percentage": 71.66, "elapsed_time": "10:17:05", "remaining_time": "4:04:03"}
{"current_steps": 2800, "total_steps": 3906, "loss": 1.3487, "learning_rate": 7.548364722815142e-06, "epoch": 0.7168, "percentage": 71.68, "elapsed_time": "10:17:18", "remaining_time": "4:03:50"}
{"current_steps": 2801, "total_steps": 3906, "loss": 1.3108, "learning_rate": 7.535650427407379e-06, "epoch": 0.717056, "percentage": 71.71, "elapsed_time": "10:17:51", "remaining_time": "4:03:44"}
{"current_steps": 2802, "total_steps": 3906, "loss": 1.3419, "learning_rate": 7.52294436286773e-06, "epoch": 0.717312, "percentage": 71.74, "elapsed_time": "10:18:04", "remaining_time": "4:03:31"}
{"current_steps": 2803, "total_steps": 3906, "loss": 1.331, "learning_rate": 7.5102465375866765e-06, "epoch": 0.717568, "percentage": 71.76, "elapsed_time": "10:18:18", "remaining_time": "4:03:18"}
{"current_steps": 2804, "total_steps": 3906, "loss": 1.3132, "learning_rate": 7.497556959949262e-06, "epoch": 0.717824, "percentage": 71.79, "elapsed_time": "10:18:31", "remaining_time": "4:03:05"}
{"current_steps": 2805, "total_steps": 3906, "loss": 1.3489, "learning_rate": 7.484875638335087e-06, "epoch": 0.71808, "percentage": 71.81, "elapsed_time": "10:18:44", "remaining_time": "4:02:51"}
{"current_steps": 2806, "total_steps": 3906, "loss": 1.3331, "learning_rate": 7.472202581118304e-06, "epoch": 0.718336, "percentage": 71.84, "elapsed_time": "10:18:57", "remaining_time": "4:02:38"}
{"current_steps": 2807, "total_steps": 3906, "loss": 1.3097, "learning_rate": 7.459537796667589e-06, "epoch": 0.718592, "percentage": 71.86, "elapsed_time": "10:19:10", "remaining_time": "4:02:25"}
{"current_steps": 2808, "total_steps": 3906, "loss": 1.2853, "learning_rate": 7.446881293346171e-06, "epoch": 0.718848, "percentage": 71.89, "elapsed_time": "10:19:23", "remaining_time": "4:02:12"}
{"current_steps": 2809, "total_steps": 3906, "loss": 1.3585, "learning_rate": 7.434233079511812e-06, "epoch": 0.719104, "percentage": 71.92, "elapsed_time": "10:19:37", "remaining_time": "4:01:58"}
{"current_steps": 2810, "total_steps": 3906, "loss": 1.3099, "learning_rate": 7.4215931635168e-06, "epoch": 0.71936, "percentage": 71.94, "elapsed_time": "10:19:50", "remaining_time": "4:01:45"}
{"current_steps": 2811, "total_steps": 3906, "loss": 1.3425, "learning_rate": 7.408961553707925e-06, "epoch": 0.719616, "percentage": 71.97, "elapsed_time": "10:20:03", "remaining_time": "4:01:32"}
{"current_steps": 2812, "total_steps": 3906, "loss": 1.3069, "learning_rate": 7.396338258426521e-06, "epoch": 0.719872, "percentage": 71.99, "elapsed_time": "10:20:16", "remaining_time": "4:01:19"}
{"current_steps": 2813, "total_steps": 3906, "loss": 1.3661, "learning_rate": 7.383723286008402e-06, "epoch": 0.720128, "percentage": 72.02, "elapsed_time": "10:20:29", "remaining_time": "4:01:05"}
{"current_steps": 2814, "total_steps": 3906, "loss": 1.3135, "learning_rate": 7.371116644783905e-06, "epoch": 0.720384, "percentage": 72.04, "elapsed_time": "10:20:42", "remaining_time": "4:00:52"}
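For orientation only (not part of the commit itself): each appended trainer_log.jsonl entry above is a single JSON object with the fields current_steps, total_steps, loss, learning_rate, epoch, percentage, elapsed_time, and remaining_time. Below is a minimal sketch of how such a log could be summarized with the Python standard library; it assumes the file keeps exactly this one-object-per-line schema and is read from the local working directory, and the 50-step window is an arbitrary illustrative choice.

import json

# Read trainer_log.jsonl: one JSON record per logged training step, as in the diff above.
records = []
with open("trainer_log.jsonl", encoding="utf-8") as fh:
    for line in fh:
        line = line.strip()
        if line:
            records.append(json.loads(line))

# Summarize the tail of the loss curve (window length is an arbitrary choice).
window = records[-50:]
mean_loss = sum(r["loss"] for r in window) / len(window)
last = records[-1]
print(f"step {last['current_steps']}/{last['total_steps']} "
      f"({last['percentage']:.2f}%), lr {last['learning_rate']:.3e}")
print(f"last loss {last['loss']:.4f}, mean over final {len(window)} steps {mean_loss:.4f}")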