Itoifi committed
Commit 395cdfb
1 Parent(s): 42aa667

Upload 16 files

32k-luna/D_12000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f8434247fb257fe8b9d856815e430dedd7b444b26e616dd4fae9efc62e692945
+ size 561098185
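
Note that each `.pth` entry in this commit is stored through Git LFS, so the diff only shows the pointer: `oid` is the SHA-256 of the real checkpoint and `size` is its byte count. A downloaded copy can be checked against its pointer with a minimal sketch like the one below; the local paths are illustrative assumptions.

```python
import hashlib
import os

def verify_lfs_pointer(pointer_path: str, blob_path: str) -> bool:
    """Check a downloaded file against the oid/size recorded in its LFS pointer."""
    fields = {}
    with open(pointer_path, "r", encoding="utf-8") as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    expected_oid = fields["oid"].split(":", 1)[1]   # "sha256:<hex>" -> "<hex>"
    expected_size = int(fields["size"])

    h = hashlib.sha256()
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    return h.hexdigest() == expected_oid and os.path.getsize(blob_path) == expected_size

# Illustrative paths; point these at the pointer file and the downloaded weights.
print(verify_lfs_pointer("32k-luna/D_12000.pth", "downloads/D_12000.pth"))
```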
32k-luna/D_18000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fb0f1309947cbbf00598cef73e31354bec21988fc8d6ad34d3cef7ad339dcf12
+ size 561098185
32k-luna/D_24000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6d8fcbac054f85aba4424912ca35f8a9f8836d862107386b33127c5b0938dba9
+ size 561098185
32k-luna/D_28000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b5ccf1aa993f894a6a2414e5c861ac32819a02d40add11db780cf44c7cd77ae9
+ size 561098185
32k-luna/D_6000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:84ed1f186b66eb9b5c36b8765ade0536830eed618880fd1c73aa265bf04f3f02
+ size 561098185
32k-luna/G_12000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9b6913aed0f9a589474a9079b34ee94d1f905477ef028f52a68f094cab567ff6
+ size 699505437
32k-luna/G_18000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5dee48ccc2b516451f81df9556054036a9334d75df12dd96635181a052e1eac0
+ size 699505437
32k-luna/G_24000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4d244de1910501ad0c52ee443e6bb8e1508654261509b88a768025ed85a7ab43
+ size 699505437
32k-luna/G_28000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d1e6a0e5ba0edbf1021de3f58e9faddacbe3b8ad3e3548232d07e9285147ef9b
+ size 699505437
32k-luna/G_6000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0dd00e4ad249553e651948e29323deee095dfbce2aa6b96bc7dc33c5cec0c38d
+ size 699505437
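
By the usual VITS-style naming, the D_* and G_* files are discriminator and generator checkpoints, and the number in each filename is the global step at which it was saved (the train.log below records, for example, step 6000 being written to G_6000.pth and D_6000.pth). A quick way to see what one of them contains is sketched below; the exact dictionary layout depends on the training script, so the keys are printed rather than assumed.

```python
import torch

# Load one checkpoint on CPU just to inspect what was saved.
ckpt = torch.load("32k-luna/G_28000.pth", map_location="cpu")
print(type(ckpt))
if isinstance(ckpt, dict):
    print(list(ckpt.keys()))          # e.g. weights, optimizer state, iteration counter
    state = ckpt.get("model", ckpt)   # assumption: weights may sit under a "model" key
    if hasattr(state, "values"):
        total = sum(v.numel() for v in state.values() if torch.is_tensor(v))
        print(f"{total / 1e6:.1f}M parameters")
```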
32k-luna/config.json ADDED
@@ -0,0 +1,90 @@
+ {
+ "train": {
+ "log_interval": 200,
+ "eval_interval": 1000,
+ "seed": 1234,
+ "epochs": 10000,
+ "learning_rate": 0.0001,
+ "betas": [
+ 0.8,
+ 0.99
+ ],
+ "eps": 1e-09,
+ "batch_size": 6,
+ "fp16_run": false,
+ "lr_decay": 0.999875,
+ "segment_size": 17920,
+ "init_lr_ratio": 1,
+ "warmup_epochs": 0,
+ "c_mel": 45,
+ "c_kl": 1.0,
+ "use_sr": true,
+ "max_speclen": 384,
+ "port": "8001"
+ },
+ "data": {
+ "training_files": "filelists/train.txt",
+ "validation_files": "filelists/val.txt",
+ "max_wav_value": 32768.0,
+ "sampling_rate": 32000,
+ "filter_length": 1280,
+ "hop_length": 320,
+ "win_length": 1280,
+ "n_mel_channels": 80,
+ "mel_fmin": 0.0,
+ "mel_fmax": null
+ },
+ "model": {
+ "inter_channels": 192,
+ "hidden_channels": 192,
+ "filter_channels": 768,
+ "n_heads": 2,
+ "n_layers": 6,
+ "kernel_size": 3,
+ "p_dropout": 0.1,
+ "resblock": "1",
+ "resblock_kernel_sizes": [
+ 3,
+ 7,
+ 11
+ ],
+ "resblock_dilation_sizes": [
+ [
+ 1,
+ 3,
+ 5
+ ],
+ [
+ 1,
+ 3,
+ 5
+ ],
+ [
+ 1,
+ 3,
+ 5
+ ]
+ ],
+ "upsample_rates": [
+ 10,
+ 8,
+ 2,
+ 2
+ ],
+ "upsample_initial_channel": 512,
+ "upsample_kernel_sizes": [
+ 16,
+ 16,
+ 4,
+ 4
+ ],
+ "n_layers_q": 3,
+ "use_spectral_norm": false,
+ "gin_channels": 256,
+ "ssl_dim": 256,
+ "n_speakers": 2
+ },
+ "spk": {
+ "luna": 0
+ }
+ }
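
A few practical numbers follow directly from this config: audio is sampled at 32 kHz, the STFT hop is 320 samples (10 ms per frame), each training segment is 17920 samples (56 frames, 0.56 s), and a single speaker "luna" is registered. The sketch below loads the file and derives these values; the path is assumed relative to the repo root.

```python
import json

with open("32k-luna/config.json", "r", encoding="utf-8") as f:
    cfg = json.load(f)

sr = cfg["data"]["sampling_rate"]    # 32000
hop = cfg["data"]["hop_length"]      # 320
seg = cfg["train"]["segment_size"]   # 17920

print(f"frame hop: {hop / sr * 1000:.1f} ms")                        # 10.0 ms
print(f"training segment: {seg // hop} frames, {seg / sr:.2f} s")    # 56 frames, 0.56 s
print("speakers:", cfg["spk"])                                       # {'luna': 0}
```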
32k-luna/eval/events.out.tfevents.1675723255.DESKTOP-P582Q00.61176.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a13a52fed9968137a574ee6e9cda186615912bca7a86886de9247c06abb428a8
+ size 40
32k-luna/eval/events.out.tfevents.1675723301.DESKTOP-P582Q00.44436.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:37d99993b00e6489f2f3a4a9470dae98ee13e0324a59ef37c042234b8e9fd282
+ size 40
32k-luna/eval/events.out.tfevents.1675723345.DESKTOP-P582Q00.51616.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f987cd76bee7c2db1682ccbc91d91ec55225f2307a18f75780a2f5af47df6387
+ size 1887159
32k-luna/eval/events.out.tfevents.1675723462.DESKTOP-P582Q00.63824.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:33c777b29b44fe69083b59dea44d58bf5a2eb95dc2c9d262ee4c4cc9acb68bb6
+ size 54737964
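
The four files under eval/ are TensorBoard event logs; the two 40-byte ones are presumably empty stubs left by runs that were restarted right away (the train.log below shows the run being relaunched a few times before training settles), while the larger files hold the actual evaluation scalars. They can be browsed with TensorBoard or read programmatically, as in this sketch; the scalar tag names are not shown anywhere in this commit, so they are discovered rather than assumed.

```python
# Read the eval scalars without launching the TensorBoard UI.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

acc = EventAccumulator("32k-luna/eval")  # point at the directory holding the event files
acc.Reload()
scalar_tags = acc.Tags()["scalars"]      # discover which scalar tags were logged
print(scalar_tags)

for tag in scalar_tags[:3]:
    events = acc.Scalars(tag)            # list of (wall_time, step, value) records
    print(tag, "->", len(events), "points, last value =", events[-1].value)
```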
32k-luna/train.log ADDED
@@ -0,0 +1,679 @@
1
+ 2023-02-07 09:40:53,280 32k INFO {'train': {'log_interval': 200, 'eval_interval': 1000, 'seed': 1234, 'epochs': 10000, 'learning_rate': 0.0001, 'betas': [0.8, 0.99], 'eps': 1e-09, 'batch_size': 6, 'fp16_run': False, 'lr_decay': 0.999875, 'segment_size': 17920, 'init_lr_ratio': 1, 'warmup_epochs': 0, 'c_mel': 45, 'c_kl': 1.0, 'use_sr': True, 'max_speclen': 384, 'port': '8001'}, 'data': {'training_files': 'filelists/train.txt', 'validation_files': 'filelists/val.txt', 'max_wav_value': 32768.0, 'sampling_rate': 32000, 'filter_length': 1280, 'hop_length': 320, 'win_length': 1280, 'n_mel_channels': 80, 'mel_fmin': 0.0, 'mel_fmax': None}, 'model': {'inter_channels': 192, 'hidden_channels': 192, 'filter_channels': 768, 'n_heads': 2, 'n_layers': 6, 'kernel_size': 3, 'p_dropout': 0.1, 'resblock': '1', 'resblock_kernel_sizes': [3, 7, 11], 'resblock_dilation_sizes': [[1, 3, 5], [1, 3, 5], [1, 3, 5]], 'upsample_rates': [10, 8, 2, 2], 'upsample_initial_channel': 512, 'upsample_kernel_sizes': [16, 16, 4, 4], 'n_layers_q': 3, 'use_spectral_norm': False, 'gin_channels': 256, 'ssl_dim': 256, 'n_speakers': 2}, 'spk': {'luna': 0}, 'model_dir': './logs\\32k'}
2
+ 2023-02-07 09:41:39,266 32k INFO {'train': {'log_interval': 200, 'eval_interval': 1000, 'seed': 1234, 'epochs': 10000, 'learning_rate': 0.0001, 'betas': [0.8, 0.99], 'eps': 1e-09, 'batch_size': 12, 'fp16_run': False, 'lr_decay': 0.999875, 'segment_size': 17920, 'init_lr_ratio': 1, 'warmup_epochs': 0, 'c_mel': 45, 'c_kl': 1.0, 'use_sr': True, 'max_speclen': 384, 'port': '8001'}, 'data': {'training_files': 'filelists/train.txt', 'validation_files': 'filelists/val.txt', 'max_wav_value': 32768.0, 'sampling_rate': 32000, 'filter_length': 1280, 'hop_length': 320, 'win_length': 1280, 'n_mel_channels': 80, 'mel_fmin': 0.0, 'mel_fmax': None}, 'model': {'inter_channels': 192, 'hidden_channels': 192, 'filter_channels': 768, 'n_heads': 2, 'n_layers': 6, 'kernel_size': 3, 'p_dropout': 0.1, 'resblock': '1', 'resblock_kernel_sizes': [3, 7, 11], 'resblock_dilation_sizes': [[1, 3, 5], [1, 3, 5], [1, 3, 5]], 'upsample_rates': [10, 8, 2, 2], 'upsample_initial_channel': 512, 'upsample_kernel_sizes': [16, 16, 4, 4], 'n_layers_q': 3, 'use_spectral_norm': False, 'gin_channels': 256, 'ssl_dim': 256, 'n_speakers': 2}, 'spk': {'luna': 0}, 'model_dir': './logs\\32k'}
3
+ 2023-02-07 09:42:23,356 32k INFO {'train': {'log_interval': 200, 'eval_interval': 1000, 'seed': 1234, 'epochs': 10000, 'learning_rate': 0.0001, 'betas': [0.8, 0.99], 'eps': 1e-09, 'batch_size': 6, 'fp16_run': False, 'lr_decay': 0.999875, 'segment_size': 17920, 'init_lr_ratio': 1, 'warmup_epochs': 0, 'c_mel': 45, 'c_kl': 1.0, 'use_sr': True, 'max_speclen': 384, 'port': '8001'}, 'data': {'training_files': 'filelists/train.txt', 'validation_files': 'filelists/val.txt', 'max_wav_value': 32768.0, 'sampling_rate': 32000, 'filter_length': 1280, 'hop_length': 320, 'win_length': 1280, 'n_mel_channels': 80, 'mel_fmin': 0.0, 'mel_fmax': None}, 'model': {'inter_channels': 192, 'hidden_channels': 192, 'filter_channels': 768, 'n_heads': 2, 'n_layers': 6, 'kernel_size': 3, 'p_dropout': 0.1, 'resblock': '1', 'resblock_kernel_sizes': [3, 7, 11], 'resblock_dilation_sizes': [[1, 3, 5], [1, 3, 5], [1, 3, 5]], 'upsample_rates': [10, 8, 2, 2], 'upsample_initial_channel': 512, 'upsample_kernel_sizes': [16, 16, 4, 4], 'n_layers_q': 3, 'use_spectral_norm': False, 'gin_channels': 256, 'ssl_dim': 256, 'n_speakers': 2}, 'spk': {'luna': 0}, 'model_dir': './logs\\32k'}
4
+ 2023-02-07 09:42:53,692 32k INFO Train Epoch: 1 [0%]
5
+ 2023-02-07 09:42:53,693 32k INFO [5.986945629119873, 5.2334113121032715, 1.1685032844543457, 101.684814453125, 285.0567321777344, 0, 0.0001]
6
+ 2023-02-07 09:42:59,408 32k INFO Saving model and optimizer state at iteration 1 to ./logs\32k\G_0.pth
7
+ 2023-02-07 09:43:14,302 32k INFO Saving model and optimizer state at iteration 1 to ./logs\32k\D_0.pth
8
+ 2023-02-07 09:44:20,696 32k INFO {'train': {'log_interval': 200, 'eval_interval': 1000, 'seed': 1234, 'epochs': 10000, 'learning_rate': 0.0001, 'betas': [0.8, 0.99], 'eps': 1e-09, 'batch_size': 6, 'fp16_run': False, 'lr_decay': 0.999875, 'segment_size': 17920, 'init_lr_ratio': 1, 'warmup_epochs': 0, 'c_mel': 45, 'c_kl': 1.0, 'use_sr': True, 'max_speclen': 384, 'port': '8001'}, 'data': {'training_files': 'filelists/train.txt', 'validation_files': 'filelists/val.txt', 'max_wav_value': 32768.0, 'sampling_rate': 32000, 'filter_length': 1280, 'hop_length': 320, 'win_length': 1280, 'n_mel_channels': 80, 'mel_fmin': 0.0, 'mel_fmax': None}, 'model': {'inter_channels': 192, 'hidden_channels': 192, 'filter_channels': 768, 'n_heads': 2, 'n_layers': 6, 'kernel_size': 3, 'p_dropout': 0.1, 'resblock': '1', 'resblock_kernel_sizes': [3, 7, 11], 'resblock_dilation_sizes': [[1, 3, 5], [1, 3, 5], [1, 3, 5]], 'upsample_rates': [10, 8, 2, 2], 'upsample_initial_channel': 512, 'upsample_kernel_sizes': [16, 16, 4, 4], 'n_layers_q': 3, 'use_spectral_norm': False, 'gin_channels': 256, 'ssl_dim': 256, 'n_speakers': 2}, 'spk': {'luna': 0}, 'model_dir': './logs\\32k'}
9
+ 2023-02-07 09:44:25,541 32k INFO Loaded checkpoint './logs\32k\G_0.pth' (iteration 1)
10
+ 2023-02-07 09:44:25,958 32k INFO Loaded checkpoint './logs\32k\D_0.pth' (iteration 1)
11
+ 2023-02-07 09:44:51,915 32k INFO Train Epoch: 1 [0%]
12
+ 2023-02-07 09:44:51,915 32k INFO [2.594619035720825, 2.5604355335235596, 15.0297269821167, 45.45681381225586, 11.650612831115723, 0, 0.0001]
13
+ 2023-02-07 09:44:57,597 32k INFO Saving model and optimizer state at iteration 1 to ./logs\32k\G_0.pth
14
+ 2023-02-07 09:45:13,749 32k INFO Saving model and optimizer state at iteration 1 to ./logs\32k\D_0.pth
15
+ 2023-02-07 09:47:02,986 32k INFO ====> Epoch: 1
16
+ 2023-02-07 09:49:09,645 32k INFO ====> Epoch: 2
17
+ 2023-02-07 09:50:00,551 32k INFO Train Epoch: 3 [27%]
18
+ 2023-02-07 09:50:00,551 32k INFO [2.360294818878174, 2.3960258960723877, 13.918700218200684, 23.92392349243164, 1.0044713020324707, 200, 9.99750015625e-05]
19
+ 2023-02-07 09:51:15,724 32k INFO ====> Epoch: 3
20
+ 2023-02-07 09:53:21,228 32k INFO ====> Epoch: 4
21
+ 2023-02-07 09:54:40,244 32k INFO Train Epoch: 5 [55%]
22
+ 2023-02-07 09:54:40,244 32k INFO [2.5581417083740234, 2.0560226440429688, 12.119918823242188, 23.648845672607422, 1.1536203622817993, 400, 9.995000937421877e-05]
23
+ 2023-02-07 09:55:26,649 32k INFO ====> Epoch: 5
24
+ 2023-02-07 09:57:31,903 32k INFO ====> Epoch: 6
25
+ 2023-02-07 09:59:20,458 32k INFO Train Epoch: 7 [82%]
26
+ 2023-02-07 09:59:20,459 32k INFO [2.645043134689331, 2.0071868896484375, 8.105449676513672, 17.2309627532959, 0.8960679173469543, 600, 9.99250234335941e-05]
27
+ 2023-02-07 09:59:39,088 32k INFO ====> Epoch: 7
28
+ 2023-02-07 10:01:44,439 32k INFO ====> Epoch: 8
29
+ 2023-02-07 10:03:50,397 32k INFO ====> Epoch: 9
30
+ 2023-02-07 10:04:22,558 32k INFO Train Epoch: 10 [9%]
31
+ 2023-02-07 10:04:22,558 32k INFO [2.5706863403320312, 2.335548162460327, 11.097796440124512, 19.198232650756836, 1.0098469257354736, 800, 9.98875562335968e-05]
32
+ 2023-02-07 10:05:57,649 32k INFO ====> Epoch: 10
33
+ 2023-02-07 10:08:05,106 32k INFO ====> Epoch: 11
34
+ 2023-02-07 10:09:05,366 32k INFO Train Epoch: 12 [36%]
35
+ 2023-02-07 10:09:05,367 32k INFO [2.2724082469940186, 2.844191312789917, 11.882951736450195, 18.201112747192383, 1.388056755065918, 1000, 9.986258590528146e-05]
36
+ 2023-02-07 10:09:09,750 32k INFO Saving model and optimizer state at iteration 12 to ./logs\32k\G_1000.pth
37
+ 2023-02-07 10:09:27,650 32k INFO Saving model and optimizer state at iteration 12 to ./logs\32k\D_1000.pth
38
+ 2023-02-07 10:10:37,986 32k INFO ====> Epoch: 12
39
+ 2023-02-07 10:12:44,311 32k INFO ====> Epoch: 13
40
+ 2023-02-07 10:14:12,926 32k INFO Train Epoch: 14 [64%]
41
+ 2023-02-07 10:14:12,926 32k INFO [2.474606990814209, 2.230579376220703, 11.757043838500977, 16.921762466430664, 1.2078344821929932, 1200, 9.983762181915804e-05]
42
+ 2023-02-07 10:14:50,340 32k INFO ====> Epoch: 14
43
+ 2023-02-07 10:16:57,519 32k INFO ====> Epoch: 15
44
+ 2023-02-07 10:18:54,687 32k INFO Train Epoch: 16 [91%]
45
+ 2023-02-07 10:18:54,687 32k INFO [2.446239709854126, 2.3385403156280518, 12.67339038848877, 20.93482208251953, 0.8955264687538147, 1400, 9.981266397366609e-05]
46
+ 2023-02-07 10:19:03,624 32k INFO ====> Epoch: 16
47
+ 2023-02-07 10:21:09,258 32k INFO ====> Epoch: 17
48
+ 2023-02-07 10:23:14,506 32k INFO ====> Epoch: 18
49
+ 2023-02-07 10:23:55,989 32k INFO Train Epoch: 19 [18%]
50
+ 2023-02-07 10:23:55,990 32k INFO [2.6378839015960693, 2.2939138412475586, 11.7266206741333, 19.231109619140625, 1.1356555223464966, 1600, 9.977523890319963e-05]
51
+ 2023-02-07 10:25:20,577 32k INFO ====> Epoch: 19
52
+ 2023-02-07 10:27:26,527 32k INFO ====> Epoch: 20
53
+ 2023-02-07 10:28:36,306 32k INFO Train Epoch: 21 [45%]
54
+ 2023-02-07 10:28:36,307 32k INFO [2.761199474334717, 2.040116786956787, 11.117597579956055, 15.121772766113281, 0.9549421668052673, 1800, 9.975029665246193e-05]
55
+ 2023-02-07 10:29:32,416 32k INFO ====> Epoch: 21
56
+ 2023-02-07 10:31:48,932 32k INFO ====> Epoch: 22
57
+ 2023-02-07 10:33:38,266 32k INFO Train Epoch: 23 [73%]
58
+ 2023-02-07 10:33:38,266 32k INFO [2.4851253032684326, 2.39241361618042, 10.837175369262695, 16.3918514251709, 1.0230045318603516, 2000, 9.972536063689719e-05]
59
+ 2023-02-07 10:33:42,963 32k INFO Saving model and optimizer state at iteration 23 to ./logs\32k\G_2000.pth
60
+ 2023-02-07 10:33:58,853 32k INFO Saving model and optimizer state at iteration 23 to ./logs\32k\D_2000.pth
61
+ 2023-02-07 10:34:47,989 32k INFO ====> Epoch: 23
62
+ 2023-02-07 10:38:48,011 32k INFO ====> Epoch: 24
63
+ 2023-02-07 10:42:49,265 32k INFO ====> Epoch: 25
64
+ 2023-02-07 10:43:13,039 32k INFO Train Epoch: 26 [0%]
65
+ 2023-02-07 10:43:13,039 32k INFO [2.6076807975769043, 2.061424732208252, 13.766759872436523, 18.384117126464844, 0.7564023733139038, 2200, 9.968796830108985e-05]
66
+ 2023-02-07 10:46:56,559 32k INFO ====> Epoch: 26
67
+ 2023-02-07 10:51:03,691 32k INFO ====> Epoch: 27
68
+ 2023-02-07 10:52:29,260 32k INFO Train Epoch: 28 [27%]
69
+ 2023-02-07 10:52:29,261 32k INFO [2.44578218460083, 2.06921124458313, 9.142973899841309, 12.838541984558105, 0.592533528804779, 2400, 9.966304786663908e-05]
70
+ 2023-02-07 10:55:15,394 32k INFO ====> Epoch: 28
71
+ 2023-02-07 10:59:20,071 32k INFO ====> Epoch: 29
72
+ 2023-02-07 11:01:53,637 32k INFO Train Epoch: 30 [55%]
73
+ 2023-02-07 11:01:53,637 32k INFO [2.4569437503814697, 2.0591907501220703, 9.697546005249023, 18.9813232421875, 0.6515152454376221, 2600, 9.963813366190753e-05]
74
+ 2023-02-07 11:03:29,785 32k INFO ====> Epoch: 30
75
+ 2023-02-07 11:07:53,678 32k INFO ====> Epoch: 31
76
+ 2023-02-07 11:11:27,366 32k INFO Train Epoch: 32 [82%]
77
+ 2023-02-07 11:11:27,367 32k INFO [2.5969486236572266, 1.939130425453186, 11.549643516540527, 17.018590927124023, 0.9572107791900635, 2800, 9.961322568533789e-05]
78
+ 2023-02-07 11:12:09,727 32k INFO ====> Epoch: 32
79
+ 2023-02-07 11:16:18,217 32k INFO ====> Epoch: 33
80
+ 2023-02-07 11:20:33,915 32k INFO ====> Epoch: 34
81
+ 2023-02-07 11:21:21,087 32k INFO Train Epoch: 35 [9%]
82
+ 2023-02-07 11:21:21,088 32k INFO [2.5165798664093018, 2.2998545169830322, 12.771017074584961, 19.952123641967773, 0.6407254338264465, 3000, 9.957587539488128e-05]
83
+ 2023-02-07 11:21:25,805 32k INFO Saving model and optimizer state at iteration 35 to ./logs\32k\G_3000.pth
84
+ 2023-02-07 11:21:45,295 32k INFO Saving model and optimizer state at iteration 35 to ./logs\32k\D_3000.pth
85
+ 2023-02-07 11:25:09,760 32k INFO ====> Epoch: 35
86
+ 2023-02-07 11:29:27,873 32k INFO ====> Epoch: 36
87
+ 2023-02-07 11:31:18,011 32k INFO Train Epoch: 37 [36%]
88
+ 2023-02-07 11:31:18,011 32k INFO [2.619579315185547, 2.3391668796539307, 9.163888931274414, 16.027456283569336, 0.9692599773406982, 3200, 9.95509829819056e-05]
89
+ 2023-02-07 11:33:47,642 32k INFO ====> Epoch: 37
90
+ 2023-02-07 11:37:56,855 32k INFO ====> Epoch: 38
91
+ 2023-02-07 11:40:45,204 32k INFO Train Epoch: 39 [64%]
92
+ 2023-02-07 11:40:45,205 32k INFO [2.350425958633423, 2.2603647708892822, 13.19098949432373, 20.018461227416992, 1.0738447904586792, 3400, 9.952609679164422e-05]
93
+ 2023-02-07 11:42:03,774 32k INFO ====> Epoch: 39
94
+ 2023-02-07 11:45:59,746 32k INFO ====> Epoch: 40
95
+ 2023-02-07 11:49:51,207 32k INFO Train Epoch: 41 [91%]
96
+ 2023-02-07 11:49:51,208 32k INFO [2.582878351211548, 2.3701279163360596, 12.43442153930664, 17.008121490478516, 1.1644971370697021, 3600, 9.950121682254156e-05]
97
+ 2023-02-07 11:50:09,809 32k INFO ====> Epoch: 41
98
+ 2023-02-07 11:53:55,535 32k INFO ====> Epoch: 42
99
+ 2023-02-07 11:57:39,870 32k INFO ====> Epoch: 43
100
+ 2023-02-07 11:58:43,347 32k INFO Train Epoch: 44 [18%]
101
+ 2023-02-07 11:58:43,347 32k INFO [2.5977330207824707, 2.3910253047943115, 11.031622886657715, 19.40188217163086, 0.8780531287193298, 3800, 9.94639085301583e-05]
102
+ 2023-02-07 12:01:31,992 32k INFO ====> Epoch: 44
103
+ 2023-02-07 12:05:23,822 32k INFO ====> Epoch: 45
104
+ 2023-02-07 12:07:21,305 32k INFO Train Epoch: 46 [45%]
105
+ 2023-02-07 12:07:21,306 32k INFO [2.4390363693237305, 2.45821213722229, 10.24112606048584, 15.837735176086426, 0.7886309027671814, 4000, 9.943904410714931e-05]
106
+ 2023-02-07 12:07:25,841 32k INFO Saving model and optimizer state at iteration 46 to ./logs\32k\G_4000.pth
107
+ 2023-02-07 12:07:42,252 32k INFO Saving model and optimizer state at iteration 46 to ./logs\32k\D_4000.pth
108
+ 2023-02-07 12:09:31,329 32k INFO ====> Epoch: 46
109
+ 2023-02-07 12:13:21,187 32k INFO ====> Epoch: 47
110
+ 2023-02-07 12:15:05,159 32k INFO Train Epoch: 48 [73%]
111
+ 2023-02-07 12:15:05,159 32k INFO [2.4634478092193604, 2.3640332221984863, 11.501005172729492, 16.302955627441406, 0.7573157548904419, 4200, 9.941418589985758e-05]
112
+ 2023-02-07 12:15:35,781 32k INFO ====> Epoch: 48
113
+ 2023-02-07 12:18:23,940 32k INFO ====> Epoch: 49
114
+ 2023-02-07 12:20:51,588 32k INFO ====> Epoch: 50
115
+ 2023-02-07 12:21:14,338 32k INFO Train Epoch: 51 [0%]
116
+ 2023-02-07 12:21:14,338 32k INFO [2.6959879398345947, 1.971150279045105, 8.562164306640625, 14.40481948852539, 0.9920418858528137, 4400, 9.937691023999092e-05]
117
+ 2023-02-07 12:23:05,457 32k INFO ====> Epoch: 51
118
+ 2023-02-07 12:25:18,467 32k INFO ====> Epoch: 52
119
+ 2023-02-07 12:26:11,363 32k INFO Train Epoch: 53 [27%]
120
+ 2023-02-07 12:26:11,364 32k INFO [2.3754444122314453, 2.2341160774230957, 11.600547790527344, 15.735363960266113, 0.42374899983406067, 4600, 9.935206756519513e-05]
121
+ 2023-02-07 12:27:32,699 32k INFO ====> Epoch: 53
122
+ 2023-02-07 12:29:47,264 32k INFO ====> Epoch: 54
123
+ 2023-02-07 12:31:10,391 32k INFO Train Epoch: 55 [55%]
124
+ 2023-02-07 12:31:10,391 32k INFO [2.421271324157715, 2.085864543914795, 11.170856475830078, 19.666301727294922, 0.7788003087043762, 4800, 9.932723110067987e-05]
125
+ 2023-02-07 12:32:00,890 32k INFO ====> Epoch: 55
126
+ 2023-02-07 12:34:14,788 32k INFO ====> Epoch: 56
127
+ 2023-02-07 12:36:08,772 32k INFO Train Epoch: 57 [82%]
128
+ 2023-02-07 12:36:08,772 32k INFO [2.432823657989502, 2.2199151515960693, 11.207275390625, 15.497830390930176, 0.9771067500114441, 5000, 9.930240084489267e-05]
129
+ 2023-02-07 12:36:13,538 32k INFO Saving model and optimizer state at iteration 57 to ./logs\32k\G_5000.pth
130
+ 2023-02-07 12:36:31,240 32k INFO Saving model and optimizer state at iteration 57 to ./logs\32k\D_5000.pth
131
+ 2023-02-07 12:36:54,614 32k INFO ====> Epoch: 57
132
+ 2023-02-07 12:39:07,517 32k INFO ====> Epoch: 58
133
+ 2023-02-07 12:41:22,030 32k INFO ====> Epoch: 59
134
+ 2023-02-07 12:41:55,027 32k INFO Train Epoch: 60 [9%]
135
+ 2023-02-07 12:41:55,027 32k INFO [2.7963428497314453, 1.8747531175613403, 10.772848129272461, 14.761898040771484, 0.5415222644805908, 5200, 9.926516709918191e-05]
136
+ 2023-02-07 12:43:35,208 32k INFO ====> Epoch: 60
137
+ 2023-02-07 12:45:47,923 32k INFO ====> Epoch: 61
138
+ 2023-02-07 12:46:50,887 32k INFO Train Epoch: 62 [36%]
139
+ 2023-02-07 12:46:50,887 32k INFO [2.6340723037719727, 2.1284472942352295, 8.606325149536133, 17.0094051361084, 0.818625807762146, 5400, 9.924035235842533e-05]
140
+ 2023-02-07 12:48:00,866 32k INFO ====> Epoch: 62
141
+ 2023-02-07 12:50:15,943 32k INFO ====> Epoch: 63
142
+ 2023-02-07 12:51:49,996 32k INFO Train Epoch: 64 [64%]
143
+ 2023-02-07 12:51:49,996 32k INFO [2.306647539138794, 2.406829357147217, 9.929342269897461, 12.849090576171875, 0.8835707902908325, 5600, 9.921554382096622e-05]
144
+ 2023-02-07 12:52:32,999 32k INFO ====> Epoch: 64
145
+ 2023-02-07 12:55:11,641 32k INFO ====> Epoch: 65
146
+ 2023-02-07 12:57:17,736 32k INFO Train Epoch: 66 [91%]
147
+ 2023-02-07 12:57:17,737 32k INFO [2.5442795753479004, 2.281177282333374, 9.918232917785645, 17.03926658630371, 0.7411346435546875, 5800, 9.919074148525384e-05]
148
+ 2023-02-07 12:57:27,339 32k INFO ====> Epoch: 66
149
+ 2023-02-07 12:59:41,684 32k INFO ====> Epoch: 67
150
+ 2023-02-07 13:01:55,764 32k INFO ====> Epoch: 68
151
+ 2023-02-07 13:02:38,827 32k INFO Train Epoch: 69 [18%]
152
+ 2023-02-07 13:02:38,828 32k INFO [2.463815689086914, 2.242199182510376, 11.57142448425293, 19.842445373535156, 0.521164059638977, 6000, 9.915354960656915e-05]
153
+ 2023-02-07 13:02:43,491 32k INFO Saving model and optimizer state at iteration 69 to ./logs\32k\G_6000.pth
154
+ 2023-02-07 13:03:00,115 32k INFO Saving model and optimizer state at iteration 69 to ./logs\32k\D_6000.pth
155
+ 2023-02-07 13:04:34,671 32k INFO ====> Epoch: 69
156
+ 2023-02-07 13:06:49,771 32k INFO ====> Epoch: 70
157
+ 2023-02-07 13:08:02,981 32k INFO Train Epoch: 71 [45%]
158
+ 2023-02-07 13:08:02,981 32k INFO [2.4700756072998047, 2.2984137535095215, 14.3892822265625, 18.679941177368164, 0.8965722322463989, 6200, 9.912876276844171e-05]
159
+ 2023-02-07 13:09:03,486 32k INFO ====> Epoch: 71
160
+ 2023-02-07 13:11:17,617 32k INFO ====> Epoch: 72
161
+ 2023-02-07 13:13:02,651 32k INFO Train Epoch: 73 [73%]
162
+ 2023-02-07 13:13:02,651 32k INFO [2.556546688079834, 2.2346560955047607, 9.572005271911621, 16.495567321777344, 0.8224639296531677, 6400, 9.910398212663652e-05]
163
+ 2023-02-07 13:13:33,359 32k INFO ====> Epoch: 73
164
+ 2023-02-07 13:15:47,755 32k INFO ====> Epoch: 74
165
+ 2023-02-07 13:18:01,568 32k INFO ====> Epoch: 75
166
+ 2023-02-07 13:18:24,282 32k INFO Train Epoch: 76 [0%]
167
+ 2023-02-07 13:18:24,282 32k INFO [2.4348723888397217, 2.264528274536133, 12.597403526306152, 17.98801040649414, 0.9530618190765381, 6600, 9.906682277864462e-05]
168
+ 2023-02-07 13:20:16,324 32k INFO ====> Epoch: 76
169
+ 2023-02-07 13:22:39,984 32k INFO ====> Epoch: 77
170
+ 2023-02-07 13:23:33,800 32k INFO Train Epoch: 78 [27%]
171
+ 2023-02-07 13:23:33,801 32k INFO [2.5431196689605713, 2.1168220043182373, 11.997557640075684, 15.961959838867188, 1.086801528930664, 6800, 9.904205762086905e-05]
172
+ 2023-02-07 13:24:54,267 32k INFO ====> Epoch: 78
173
+ 2023-02-07 13:27:09,425 32k INFO ====> Epoch: 79
174
+ 2023-02-07 13:28:32,803 32k INFO Train Epoch: 80 [55%]
175
+ 2023-02-07 13:28:32,803 32k INFO [2.5890021324157715, 1.954834222793579, 11.328150749206543, 18.769920349121094, 1.155487060546875, 7000, 9.901729865399597e-05]
176
+ 2023-02-07 13:28:37,490 32k INFO Saving model and optimizer state at iteration 80 to ./logs\32k\G_7000.pth
177
+ 2023-02-07 13:28:53,607 32k INFO Saving model and optimizer state at iteration 80 to ./logs\32k\D_7000.pth
178
+ 2023-02-07 13:29:47,400 32k INFO ====> Epoch: 80
179
+ 2023-02-07 13:32:00,461 32k INFO ====> Epoch: 81
180
+ 2023-02-07 13:33:55,048 32k INFO Train Epoch: 82 [82%]
181
+ 2023-02-07 13:33:55,049 32k INFO [2.2798125743865967, 2.5595874786376953, 8.555220603942871, 16.308244705200195, 0.8951782584190369, 7200, 9.899254587647776e-05]
182
+ 2023-02-07 13:34:14,940 32k INFO ====> Epoch: 82
183
+ 2023-02-07 13:36:27,447 32k INFO ====> Epoch: 83
184
+ 2023-02-07 13:38:39,624 32k INFO ====> Epoch: 84
185
+ 2023-02-07 13:39:13,048 32k INFO Train Epoch: 85 [9%]
186
+ 2023-02-07 13:39:13,048 32k INFO [2.489978790283203, 2.357851505279541, 11.63048267364502, 19.388751983642578, 0.6406951546669006, 7400, 9.895542831185631e-05]
187
+ 2023-02-07 13:40:54,422 32k INFO ====> Epoch: 85
188
+ 2023-02-07 13:43:08,509 32k INFO ====> Epoch: 86
189
+ 2023-02-07 13:44:55,190 32k INFO Train Epoch: 87 [36%]
190
+ 2023-02-07 13:44:55,191 32k INFO [2.4624717235565186, 2.577338933944702, 12.478321075439453, 18.478761672973633, 0.7297022938728333, 7600, 9.89306910009569e-05]
191
+ 2023-02-07 13:47:39,853 32k INFO ====> Epoch: 87
192
+ 2023-02-07 13:52:32,007 32k INFO ====> Epoch: 88
193
+ 2023-02-07 13:55:27,410 32k INFO Train Epoch: 89 [64%]
194
+ 2023-02-07 13:55:27,411 32k INFO [2.430572986602783, 2.4413130283355713, 12.744434356689453, 18.379131317138672, 0.8439734578132629, 7800, 9.89059598739987e-05]
195
+ 2023-02-07 13:56:41,864 32k INFO ====> Epoch: 89
196
+ 2023-02-07 14:00:08,771 32k INFO ====> Epoch: 90
197
+ 2023-02-07 14:02:55,544 32k INFO Train Epoch: 91 [91%]
198
+ 2023-02-07 14:02:55,544 32k INFO [2.561445474624634, 2.1027495861053467, 12.336583137512207, 17.24584197998047, 0.9410633444786072, 8000, 9.888123492943583e-05]
199
+ 2023-02-07 14:03:02,073 32k INFO Saving model and optimizer state at iteration 91 to ./logs\32k\G_8000.pth
200
+ 2023-02-07 14:03:19,461 32k INFO Saving model and optimizer state at iteration 91 to ./logs\32k\D_8000.pth
201
+ 2023-02-07 14:03:44,251 32k INFO ====> Epoch: 91
202
+ 2023-02-07 14:07:20,629 32k INFO ====> Epoch: 92
203
+ 2023-02-07 14:09:55,893 32k INFO ====> Epoch: 93
204
+ 2023-02-07 14:10:51,286 32k INFO Train Epoch: 94 [18%]
205
+ 2023-02-07 14:10:51,286 32k INFO [2.446747303009033, 2.4493002891540527, 12.713964462280273, 18.375844955444336, 0.9940114617347717, 8200, 9.884415910120204e-05]
206
+ 2023-02-07 14:12:34,389 32k INFO ====> Epoch: 94
207
+ 2023-02-07 14:14:49,239 32k INFO ====> Epoch: 95
208
+ 2023-02-07 14:16:04,764 32k INFO Train Epoch: 96 [45%]
209
+ 2023-02-07 14:16:04,764 32k INFO [2.501119613647461, 2.320194959640503, 12.225476264953613, 16.58141326904297, 0.6507038474082947, 8400, 9.881944960586671e-05]
210
+ 2023-02-07 14:17:05,214 32k INFO ====> Epoch: 96
211
+ 2023-02-07 14:19:18,111 32k INFO ====> Epoch: 97
212
+ 2023-02-07 14:21:32,746 32k INFO Train Epoch: 98 [73%]
213
+ 2023-02-07 14:21:32,747 32k INFO [2.3718044757843018, 2.317812204360962, 12.397613525390625, 18.040645599365234, 0.6666255593299866, 8600, 9.879474628751914e-05]
214
+ 2023-02-07 14:22:02,311 32k INFO ====> Epoch: 98
215
+ 2023-02-07 14:24:14,061 32k INFO ====> Epoch: 99
216
+ 2023-02-07 14:26:28,249 32k INFO ====> Epoch: 100
217
+ 2023-02-07 14:26:51,168 32k INFO Train Epoch: 101 [0%]
218
+ 2023-02-07 14:26:51,168 32k INFO [2.661276340484619, 1.9135569334030151, 7.072793006896973, 14.621119499206543, 1.1172854900360107, 8800, 9.875770288847208e-05]
219
+ 2023-02-07 14:28:41,419 32k INFO ====> Epoch: 101
220
+ 2023-02-07 14:30:54,121 32k INFO ====> Epoch: 102
221
+ 2023-02-07 14:31:47,886 32k INFO Train Epoch: 103 [27%]
222
+ 2023-02-07 14:31:47,886 32k INFO [2.3931992053985596, 2.2867274284362793, 11.25641918182373, 17.3231143951416, 0.8995411992073059, 9000, 9.873301500583906e-05]
223
+ 2023-02-07 14:31:52,318 32k INFO Saving model and optimizer state at iteration 103 to ./logs\32k\G_9000.pth
224
+ 2023-02-07 14:32:10,592 32k INFO Saving model and optimizer state at iteration 103 to ./logs\32k\D_9000.pth
225
+ 2023-02-07 14:33:33,837 32k INFO ====> Epoch: 103
226
+ 2023-02-07 14:35:47,392 32k INFO ====> Epoch: 104
227
+ 2023-02-07 14:37:10,995 32k INFO Train Epoch: 105 [55%]
228
+ 2023-02-07 14:37:10,995 32k INFO [2.6397929191589355, 1.9650437831878662, 9.858829498291016, 16.57013511657715, 0.5786668062210083, 9200, 9.870833329479095e-05]
229
+ 2023-02-07 14:38:01,496 32k INFO ====> Epoch: 105
230
+ 2023-02-07 14:40:15,064 32k INFO ====> Epoch: 106
231
+ 2023-02-07 14:42:10,744 32k INFO Train Epoch: 107 [82%]
232
+ 2023-02-07 14:42:10,745 32k INFO [2.5161538124084473, 2.1780292987823486, 12.834193229675293, 17.641633987426758, 0.9528986811637878, 9400, 9.868365775378495e-05]
233
+ 2023-02-07 14:42:30,034 32k INFO ====> Epoch: 107
234
+ 2023-02-07 14:45:07,976 32k INFO ====> Epoch: 108
235
+ 2023-02-07 14:47:20,329 32k INFO ====> Epoch: 109
236
+ 2023-02-07 14:47:53,047 32k INFO Train Epoch: 110 [9%]
237
+ 2023-02-07 14:47:53,048 32k INFO [2.442427635192871, 2.339263439178467, 11.993571281433105, 15.568887710571289, 0.6255054473876953, 9600, 9.864665600773098e-05]
238
+ 2023-02-07 14:49:33,420 32k INFO ====> Epoch: 110
239
+ 2023-02-07 14:51:46,975 32k INFO ====> Epoch: 111
240
+ 2023-02-07 14:52:49,425 32k INFO Train Epoch: 112 [36%]
241
+ 2023-02-07 14:52:49,425 32k INFO [2.4783666133880615, 2.35465145111084, 11.851914405822754, 16.24662208557129, 0.18825632333755493, 9800, 9.862199588508305e-05]
242
+ 2023-02-07 14:53:58,353 32k INFO ====> Epoch: 112
243
+ 2023-02-07 14:56:13,225 32k INFO ====> Epoch: 113
244
+ 2023-02-07 14:57:45,173 32k INFO Train Epoch: 114 [64%]
245
+ 2023-02-07 14:57:45,173 32k INFO [2.1118600368499756, 2.582984209060669, 7.736230850219727, 16.275470733642578, 0.714296817779541, 10000, 9.859734192708044e-05]
246
+ 2023-02-07 14:57:49,644 32k INFO Saving model and optimizer state at iteration 114 to ./logs\32k\G_10000.pth
247
+ 2023-02-07 14:58:06,448 32k INFO Saving model and optimizer state at iteration 114 to ./logs\32k\D_10000.pth
248
+ 2023-02-07 14:58:49,496 32k INFO ====> Epoch: 114
249
+ 2023-02-07 15:01:03,128 32k INFO ====> Epoch: 115
250
+ 2023-02-07 15:03:06,945 32k INFO Train Epoch: 116 [91%]
251
+ 2023-02-07 15:03:06,945 32k INFO [2.4191789627075195, 2.259423017501831, 10.048250198364258, 17.279565811157227, 0.4913417398929596, 10200, 9.857269413218213e-05]
252
+ 2023-02-07 15:03:16,527 32k INFO ====> Epoch: 116
253
+ 2023-02-07 15:05:28,914 32k INFO ====> Epoch: 117
254
+ 2023-02-07 15:07:40,479 32k INFO ====> Epoch: 118
255
+ 2023-02-07 15:08:23,214 32k INFO Train Epoch: 119 [18%]
256
+ 2023-02-07 15:08:23,214 32k INFO [1.971365213394165, 2.894338607788086, 10.93272876739502, 15.680130004882812, 0.8430463671684265, 10400, 9.853573399228505e-05]
257
+ 2023-02-07 15:09:53,437 32k INFO ====> Epoch: 119
258
+ 2023-02-07 15:12:05,648 32k INFO ====> Epoch: 120
259
+ 2023-02-07 15:13:18,841 32k INFO Train Epoch: 121 [45%]
260
+ 2023-02-07 15:13:18,841 32k INFO [2.4416098594665527, 2.3172998428344727, 11.274493217468262, 18.475496292114258, 0.600033164024353, 10600, 9.851110159840781e-05]
261
+ 2023-02-07 15:14:19,433 32k INFO ====> Epoch: 121
262
+ 2023-02-07 15:16:31,421 32k INFO ====> Epoch: 122
263
+ 2023-02-07 15:18:15,901 32k INFO Train Epoch: 123 [73%]
264
+ 2023-02-07 15:18:15,901 32k INFO [2.402225971221924, 2.4027457237243652, 14.501314163208008, 18.68280029296875, 1.2635655403137207, 10800, 9.848647536224416e-05]
265
+ 2023-02-07 15:18:45,304 32k INFO ====> Epoch: 123
266
+ 2023-02-07 15:20:56,412 32k INFO ====> Epoch: 124
267
+ 2023-02-07 15:23:09,824 32k INFO ====> Epoch: 125
268
+ 2023-02-07 15:23:32,774 32k INFO Train Epoch: 126 [0%]
269
+ 2023-02-07 15:23:32,775 32k INFO [2.6220815181732178, 2.037874221801758, 10.186017036437988, 16.901756286621094, 0.6591813564300537, 11000, 9.84495475503445e-05]
270
+ 2023-02-07 15:23:37,377 32k INFO Saving model and optimizer state at iteration 126 to ./logs\32k\G_11000.pth
271
+ 2023-02-07 15:23:53,672 32k INFO Saving model and optimizer state at iteration 126 to ./logs\32k\D_11000.pth
272
+ 2023-02-07 15:25:45,526 32k INFO ====> Epoch: 126
273
+ 2023-02-07 15:27:59,414 32k INFO ====> Epoch: 127
274
+ 2023-02-07 15:28:52,412 32k INFO Train Epoch: 128 [27%]
275
+ 2023-02-07 15:28:52,412 32k INFO [2.4518520832061768, 2.2550909519195557, 11.731948852539062, 15.13012409210205, 1.0153660774230957, 11200, 9.842493670173108e-05]
276
+ 2023-02-07 15:30:12,852 32k INFO ====> Epoch: 128
277
+ 2023-02-07 15:32:25,533 32k INFO ====> Epoch: 129
278
+ 2023-02-07 15:33:48,107 32k INFO Train Epoch: 130 [55%]
279
+ 2023-02-07 15:33:48,107 32k INFO [2.4804108142852783, 2.040149688720703, 10.706924438476562, 15.55229377746582, 0.7179027795791626, 11400, 9.840033200544528e-05]
280
+ 2023-02-07 15:34:38,147 32k INFO ====> Epoch: 130
281
+ 2023-02-07 15:36:52,502 32k INFO ====> Epoch: 131
282
+ 2023-02-07 15:38:44,776 32k INFO Train Epoch: 132 [82%]
283
+ 2023-02-07 15:38:44,776 32k INFO [2.5530967712402344, 2.3860535621643066, 12.224138259887695, 17.293977737426758, 0.9359664916992188, 11600, 9.837573345994909e-05]
284
+ 2023-02-07 15:39:04,700 32k INFO ====> Epoch: 132
285
+ 2023-02-07 15:41:17,332 32k INFO ====> Epoch: 133
286
+ 2023-02-07 15:43:29,565 32k INFO ====> Epoch: 134
287
+ 2023-02-07 15:44:02,326 32k INFO Train Epoch: 135 [9%]
288
+ 2023-02-07 15:44:02,326 32k INFO [2.4722814559936523, 2.3017029762268066, 12.974905967712402, 15.865802764892578, 0.6799976229667664, 11800, 9.833884717107196e-05]
289
+ 2023-02-07 15:45:42,945 32k INFO ====> Epoch: 135
290
+ 2023-02-07 15:47:57,595 32k INFO ====> Epoch: 136
291
+ 2023-02-07 15:49:01,296 32k INFO Train Epoch: 137 [36%]
292
+ 2023-02-07 15:49:01,297 32k INFO [2.5428762435913086, 2.3497259616851807, 13.335613250732422, 17.625024795532227, 0.8169641494750977, 12000, 9.831426399582366e-05]
293
+ 2023-02-07 15:49:05,721 32k INFO Saving model and optimizer state at iteration 137 to ./logs\32k\G_12000.pth
294
+ 2023-02-07 15:49:24,121 32k INFO Saving model and optimizer state at iteration 137 to ./logs\32k\D_12000.pth
295
+ 2023-02-07 15:50:37,819 32k INFO ====> Epoch: 137
296
+ 2023-02-07 15:52:51,111 32k INFO ====> Epoch: 138
297
+ 2023-02-07 15:54:25,928 32k INFO Train Epoch: 139 [64%]
298
+ 2023-02-07 15:54:25,928 32k INFO [2.431962490081787, 2.0768401622772217, 11.875350952148438, 16.097793579101562, 0.7626942992210388, 12200, 9.828968696598508e-05]
299
+ 2023-02-07 15:55:05,314 32k INFO ====> Epoch: 139
300
+ 2023-02-07 15:57:18,392 32k INFO ====> Epoch: 140
301
+ 2023-02-07 15:59:21,946 32k INFO Train Epoch: 141 [91%]
302
+ 2023-02-07 15:59:21,947 32k INFO [2.2864203453063965, 2.4313251972198486, 9.686891555786133, 14.229582786560059, 0.9563338756561279, 12400, 9.826511608001993e-05]
303
+ 2023-02-07 15:59:31,745 32k INFO ====> Epoch: 141
304
+ 2023-02-07 16:01:42,737 32k INFO ====> Epoch: 142
305
+ 2023-02-07 16:03:54,591 32k INFO ====> Epoch: 143
306
+ 2023-02-07 16:04:37,359 32k INFO Train Epoch: 144 [18%]
307
+ 2023-02-07 16:04:37,359 32k INFO [2.4183509349823, 2.2550737857818604, 11.305303573608398, 17.390254974365234, 0.8337610363960266, 12600, 9.822827126747529e-05]
308
+ 2023-02-07 16:06:07,140 32k INFO ====> Epoch: 144
309
+ 2023-02-07 16:08:19,593 32k INFO ====> Epoch: 145
310
+ 2023-02-07 16:09:34,863 32k INFO Train Epoch: 146 [45%]
311
+ 2023-02-07 16:09:34,863 32k INFO [2.463930130004883, 2.106257200241089, 10.743441581726074, 14.500619888305664, 0.16743861138820648, 12800, 9.820371573447515e-05]
312
+ 2023-02-07 16:10:34,381 32k INFO ====> Epoch: 146
313
+ 2023-02-07 16:12:47,597 32k INFO ====> Epoch: 147
314
+ 2023-02-07 16:14:31,055 32k INFO Train Epoch: 148 [73%]
315
+ 2023-02-07 16:14:31,055 32k INFO [2.6500000953674316, 2.072178602218628, 9.274537086486816, 13.794744491577148, 0.861379861831665, 13000, 9.817916633997459e-05]
316
+ 2023-02-07 16:14:35,597 32k INFO Saving model and optimizer state at iteration 148 to ./logs\32k\G_13000.pth
317
+ 2023-02-07 16:14:53,711 32k INFO Saving model and optimizer state at iteration 148 to ./logs\32k\D_13000.pth
318
+ 2023-02-07 16:15:26,054 32k INFO ====> Epoch: 148
319
+ 2023-02-07 16:17:40,362 32k INFO ====> Epoch: 149
320
+ 2023-02-07 16:19:54,446 32k INFO ====> Epoch: 150
321
+ 2023-02-07 16:20:17,171 32k INFO Train Epoch: 151 [0%]
322
+ 2023-02-07 16:20:17,171 32k INFO [2.458037853240967, 2.1006457805633545, 10.140434265136719, 15.663694381713867, 0.6432273387908936, 13200, 9.814235375455375e-05]
323
+ 2023-02-07 16:22:07,723 32k INFO ====> Epoch: 151
324
+ 2023-02-07 16:24:36,251 32k INFO ====> Epoch: 152
325
+ 2023-02-07 16:26:17,326 32k INFO Train Epoch: 153 [27%]
326
+ 2023-02-07 16:26:17,327 32k INFO [2.397064447402954, 2.1317219734191895, 14.287264823913574, 16.660289764404297, 0.4551805853843689, 13400, 9.811781969958938e-05]
327
+ 2023-02-07 16:28:47,391 32k INFO ====> Epoch: 153
328
+ 2023-02-07 16:31:01,375 32k INFO ====> Epoch: 154
329
+ 2023-02-07 16:32:32,011 32k INFO Train Epoch: 155 [55%]
330
+ 2023-02-07 16:32:32,012 32k INFO [2.364081382751465, 2.2779157161712646, 10.857946395874023, 17.385780334472656, 0.44523704051971436, 13600, 9.809329177775541e-05]
331
+ 2023-02-07 16:33:24,592 32k INFO ====> Epoch: 155
332
+ 2023-02-07 16:35:51,324 32k INFO ====> Epoch: 156
333
+ 2023-02-07 16:37:44,299 32k INFO Train Epoch: 157 [82%]
334
+ 2023-02-07 16:37:44,299 32k INFO [2.509974718093872, 2.240321159362793, 10.872742652893066, 16.8741512298584, 0.6555373668670654, 13800, 9.806876998751865e-05]
335
+ 2023-02-07 16:38:03,447 32k INFO ====> Epoch: 157
336
+ 2023-02-07 16:40:17,033 32k INFO ====> Epoch: 158
337
+ 2023-02-07 16:42:44,793 32k INFO ====> Epoch: 159
338
+ 2023-02-07 16:43:20,826 32k INFO Train Epoch: 160 [9%]
339
+ 2023-02-07 16:43:20,826 32k INFO [2.423384428024292, 2.4248738288879395, 11.930776596069336, 16.296314239501953, 0.42344143986701965, 14000, 9.803199879555537e-05]
340
+ 2023-02-07 16:43:25,921 32k INFO Saving model and optimizer state at iteration 160 to ./logs\32k\G_14000.pth
341
+ 2023-02-07 16:43:43,660 32k INFO Saving model and optimizer state at iteration 160 to ./logs\32k\D_14000.pth
342
+ 2023-02-07 16:46:17,844 32k INFO ====> Epoch: 160
343
+ 2023-02-07 16:49:43,811 32k INFO ====> Epoch: 161
344
+ 2023-02-07 16:51:59,890 32k INFO Train Epoch: 162 [36%]
345
+ 2023-02-07 16:51:59,890 32k INFO [2.374708652496338, 2.5799448490142822, 8.881999969482422, 15.167359352111816, 0.5163499116897583, 14200, 9.800749232760646e-05]
346
+ 2023-02-07 16:53:37,318 32k INFO ====> Epoch: 162
347
+ 2023-02-07 16:55:49,815 32k INFO ====> Epoch: 163
348
+ 2023-02-07 16:57:23,388 32k INFO Train Epoch: 164 [64%]
349
+ 2023-02-07 16:57:23,389 32k INFO [2.470241069793701, 2.37218976020813, 9.033040046691895, 14.695220947265625, 0.9334152936935425, 14400, 9.798299198589162e-05]
350
+ 2023-02-07 16:58:03,354 32k INFO ====> Epoch: 164
351
+ 2023-02-07 17:00:16,412 32k INFO ====> Epoch: 165
352
+ 2023-02-07 17:02:20,963 32k INFO Train Epoch: 166 [91%]
353
+ 2023-02-07 17:02:20,964 32k INFO [2.600351333618164, 2.1910109519958496, 8.723526954650879, 14.268444061279297, 1.0583291053771973, 14600, 9.795849776887939e-05]
354
+ 2023-02-07 17:02:34,097 32k INFO ====> Epoch: 166
355
+ 2023-02-07 17:05:33,762 32k INFO ====> Epoch: 167
356
+ 2023-02-07 17:08:26,547 32k INFO ====> Epoch: 168
357
+ 2023-02-07 17:09:12,656 32k INFO Train Epoch: 169 [18%]
358
+ 2023-02-07 17:09:12,656 32k INFO [2.4051764011383057, 2.416426658630371, 11.779657363891602, 18.174877166748047, 0.8009364008903503, 14800, 9.792176792382932e-05]
359
+ 2023-02-07 17:10:42,842 32k INFO ====> Epoch: 169
360
+ 2023-02-07 17:12:55,974 32k INFO ====> Epoch: 170
361
+ 2023-02-07 17:14:08,952 32k INFO Train Epoch: 171 [45%]
362
+ 2023-02-07 17:14:08,953 32k INFO [2.5530202388763428, 2.314896583557129, 12.335034370422363, 18.924882888793945, 0.3423107862472534, 15000, 9.789728901187598e-05]
363
+ 2023-02-07 17:14:13,613 32k INFO Saving model and optimizer state at iteration 171 to ./logs\32k\G_15000.pth
364
+ 2023-02-07 17:14:31,179 32k INFO Saving model and optimizer state at iteration 171 to ./logs\32k\D_15000.pth
365
+ 2023-02-07 17:15:34,281 32k INFO ====> Epoch: 171
366
+ 2023-02-07 17:17:47,533 32k INFO ====> Epoch: 172
367
+ 2023-02-07 17:19:31,440 32k INFO Train Epoch: 173 [73%]
368
+ 2023-02-07 17:19:31,441 32k INFO [2.415375232696533, 2.302281379699707, 13.263263702392578, 17.304351806640625, 1.09013831615448, 15200, 9.787281621926815e-05]
369
+ 2023-02-07 17:20:00,640 32k INFO ====> Epoch: 173
370
+ 2023-02-07 17:22:13,373 32k INFO ====> Epoch: 174
371
+ 2023-02-07 17:24:27,188 32k INFO ====> Epoch: 175
372
+ 2023-02-07 17:24:49,986 32k INFO Train Epoch: 176 [0%]
373
+ 2023-02-07 17:24:49,987 32k INFO [2.4495227336883545, 2.2643768787384033, 13.395185470581055, 18.15376853942871, 0.45867687463760376, 15400, 9.783611850078301e-05]
374
+ 2023-02-07 17:26:41,785 32k INFO ====> Epoch: 176
375
+ 2023-02-07 17:28:53,950 32k INFO ====> Epoch: 177
376
+ 2023-02-07 17:29:46,647 32k INFO Train Epoch: 178 [27%]
377
+ 2023-02-07 17:29:46,647 32k INFO [2.297741651535034, 2.258887529373169, 12.602045059204102, 16.67408561706543, 0.6464719772338867, 15600, 9.781166099984716e-05]
378
+ 2023-02-07 17:31:07,602 32k INFO ====> Epoch: 178
379
+ 2023-02-07 17:33:21,685 32k INFO ====> Epoch: 179
380
+ 2023-02-07 17:34:45,434 32k INFO Train Epoch: 180 [55%]
381
+ 2023-02-07 17:34:45,434 32k INFO [2.4268381595611572, 2.02866792678833, 7.66528844833374, 12.839248657226562, 0.8188288807868958, 15800, 9.778720961290439e-05]
382
+ 2023-02-07 17:35:35,680 32k INFO ====> Epoch: 180
383
+ 2023-02-07 17:37:48,992 32k INFO ====> Epoch: 181
384
+ 2023-02-07 17:39:42,266 32k INFO Train Epoch: 182 [82%]
385
+ 2023-02-07 17:39:42,267 32k INFO [2.4471888542175293, 2.309143543243408, 14.801136016845703, 16.42742919921875, 0.8475450277328491, 16000, 9.776276433842631e-05]
386
+ 2023-02-07 17:39:46,929 32k INFO Saving model and optimizer state at iteration 182 to ./logs\32k\G_16000.pth
387
+ 2023-02-07 17:40:06,027 32k INFO Saving model and optimizer state at iteration 182 to ./logs\32k\D_16000.pth
388
+ 2023-02-07 17:40:28,918 32k INFO ====> Epoch: 182
389
+ 2023-02-07 17:42:42,741 32k INFO ====> Epoch: 183
390
+ 2023-02-07 17:44:55,233 32k INFO ====> Epoch: 184
391
+ 2023-02-07 17:45:28,630 32k INFO Train Epoch: 185 [9%]
392
+ 2023-02-07 17:45:28,630 32k INFO [2.4997944831848145, 2.3628575801849365, 9.75255012512207, 15.626605987548828, 0.8665064573287964, 16200, 9.772610788423802e-05]
393
+ 2023-02-07 17:47:09,398 32k INFO ====> Epoch: 185
394
+ 2023-02-07 17:49:21,874 32k INFO ====> Epoch: 186
395
+ 2023-02-07 17:50:26,652 32k INFO Train Epoch: 187 [36%]
396
+ 2023-02-07 17:50:26,653 32k INFO [2.6311392784118652, 2.372824192047119, 11.809494018554688, 17.118436813354492, 0.47372499108314514, 16400, 9.77016778842374e-05]
397
+ 2023-02-07 17:51:35,978 32k INFO ====> Epoch: 187
398
+ 2023-02-07 17:53:49,374 32k INFO ====> Epoch: 188
399
+ 2023-02-07 17:55:23,233 32k INFO Train Epoch: 189 [64%]
400
+ 2023-02-07 17:55:23,233 32k INFO [2.4115920066833496, 2.6004247665405273, 10.292720794677734, 13.990850448608398, 0.6128233671188354, 16600, 9.767725399135504e-05]
401
+ 2023-02-07 17:56:02,183 32k INFO ====> Epoch: 189
402
+ 2023-02-07 17:58:15,629 32k INFO ====> Epoch: 190
403
+ 2023-02-07 18:00:18,697 32k INFO Train Epoch: 191 [91%]
404
+ 2023-02-07 18:00:18,698 32k INFO [2.476590633392334, 2.3959107398986816, 9.825133323669434, 17.272159576416016, 0.5982983112335205, 16800, 9.765283620406429e-05]
405
+ 2023-02-07 18:00:28,315 32k INFO ====> Epoch: 191
406
+ 2023-02-07 18:02:42,561 32k INFO ====> Epoch: 192
407
+ 2023-02-07 18:04:54,987 32k INFO ====> Epoch: 193
408
+ 2023-02-07 18:05:37,595 32k INFO Train Epoch: 194 [18%]
409
+ 2023-02-07 18:05:37,595 32k INFO [2.5469231605529785, 2.261059045791626, 12.21806526184082, 18.74179458618164, 0.7255897521972656, 17000, 9.761622096777372e-05]
410
+ 2023-02-07 18:05:42,091 32k INFO Saving model and optimizer state at iteration 194 to ./logs\32k\G_17000.pth
411
+ 2023-02-07 18:06:01,137 32k INFO Saving model and optimizer state at iteration 194 to ./logs\32k\D_17000.pth
412
+ 2023-02-07 18:07:34,248 32k INFO ====> Epoch: 194
413
+ 2023-02-07 18:09:47,906 32k INFO ====> Epoch: 195
414
+ 2023-02-07 18:11:02,084 32k INFO Train Epoch: 196 [45%]
415
+ 2023-02-07 18:11:02,084 32k INFO [2.4885900020599365, 2.383333683013916, 9.689787864685059, 13.685766220092773, 0.9633486866950989, 17200, 9.759181843778522e-05]
416
+ 2023-02-07 18:12:02,361 32k INFO ====> Epoch: 196
417
+ 2023-02-07 18:16:49,498 32k INFO ====> Epoch: 197
418
+ 2023-02-07 18:20:33,618 32k INFO Train Epoch: 198 [73%]
419
+ 2023-02-07 18:20:33,618 32k INFO [2.499962568283081, 2.4040303230285645, 12.432945251464844, 17.229080200195312, 0.8561845421791077, 17400, 9.756742200804793e-05]
420
+ 2023-02-07 18:21:47,702 32k INFO ====> Epoch: 198
421
+ 2023-02-07 18:26:57,720 32k INFO ====> Epoch: 199
422
+ 2023-02-07 18:31:17,795 32k INFO ====> Epoch: 200
423
+ 2023-02-07 18:31:43,198 32k INFO Train Epoch: 201 [0%]
424
+ 2023-02-07 18:31:43,198 32k INFO [2.4892737865448, 2.2194271087646484, 12.615705490112305, 16.821392059326172, 1.0796860456466675, 17600, 9.753083879807726e-05]
425
+ 2023-02-07 18:36:01,686 32k INFO ====> Epoch: 201
426
+ 2023-02-07 18:40:55,903 32k INFO ====> Epoch: 202
427
+ 2023-02-07 18:42:29,324 32k INFO Train Epoch: 203 [27%]
428
+ 2023-02-07 18:42:29,325 32k INFO [2.3428452014923096, 2.189953088760376, 12.907392501831055, 15.840951919555664, 0.48792731761932373, 17800, 9.750645761229709e-05]
429
+ 2023-02-07 18:45:49,509 32k INFO ====> Epoch: 203
430
+ 2023-02-07 18:50:48,031 32k INFO ====> Epoch: 204
431
+ 2023-02-07 18:53:31,402 32k INFO Train Epoch: 205 [55%]
432
+ 2023-02-07 18:53:31,402 32k INFO [2.4485435485839844, 2.108217716217041, 11.225994110107422, 14.583245277404785, 0.8323250412940979, 18000, 9.748208252143241e-05]
433
+ 2023-02-07 18:53:35,876 32k INFO Saving model and optimizer state at iteration 205 to ./logs\32k\G_18000.pth
434
+ 2023-02-07 18:53:52,648 32k INFO Saving model and optimizer state at iteration 205 to ./logs\32k\D_18000.pth
435
+ 2023-02-07 18:55:56,093 32k INFO ====> Epoch: 205
436
+ 2023-02-07 19:00:33,984 32k INFO ====> Epoch: 206
437
+ 2023-02-07 19:04:49,445 32k INFO Train Epoch: 207 [82%]
438
+ 2023-02-07 19:04:49,446 32k INFO [2.674133062362671, 2.064514636993408, 5.687127113342285, 13.519594192504883, 0.5315198302268982, 18200, 9.745771352395957e-05]
439
+ 2023-02-07 19:05:35,719 32k INFO ====> Epoch: 207
440
+ 2023-02-07 19:10:13,777 32k INFO ====> Epoch: 208
441
+ 2023-02-07 19:14:47,548 32k INFO ====> Epoch: 209
442
+ 2023-02-07 19:15:35,383 32k INFO Train Epoch: 210 [9%]
443
+ 2023-02-07 19:15:35,383 32k INFO [2.5437965393066406, 2.2913265228271484, 13.787505149841309, 18.729778289794922, 1.0571695566177368, 18400, 9.742117144952805e-05]
444
+ 2023-02-07 19:19:16,628 32k INFO ====> Epoch: 210
445
+ 2023-02-07 19:23:59,758 32k INFO ====> Epoch: 211
446
+ 2023-02-07 19:26:01,081 32k INFO Train Epoch: 212 [36%]
447
+ 2023-02-07 19:26:01,081 32k INFO [2.5189802646636963, 2.5148799419403076, 15.009998321533203, 17.97093391418457, 0.9030418395996094, 18600, 9.739681767887146e-05]
448
+ 2023-02-07 19:28:45,806 32k INFO ====> Epoch: 212
449
+ 2023-02-07 19:33:39,228 32k INFO ====> Epoch: 213
450
+ 2023-02-07 19:36:49,889 32k INFO Train Epoch: 214 [64%]
451
+ 2023-02-07 19:36:49,889 32k INFO [2.323404312133789, 2.521040916442871, 12.572664260864258, 14.952072143554688, 0.69417405128479, 18800, 9.7372469996277e-05]
452
+ 2023-02-07 19:38:15,620 32k INFO ====> Epoch: 214
453
+ 2023-02-07 19:43:04,992 32k INFO ====> Epoch: 215
454
+ 2023-02-07 19:47:35,219 32k INFO Train Epoch: 216 [91%]
455
+ 2023-02-07 19:47:35,219 32k INFO [2.5679819583892822, 2.0207834243774414, 8.832568168640137, 14.715601921081543, 0.6317256093025208, 19000, 9.734812840022278e-05]
456
+ 2023-02-07 19:47:39,759 32k INFO Saving model and optimizer state at iteration 216 to ./logs\32k\G_19000.pth
457
+ 2023-02-07 19:47:57,845 32k INFO Saving model and optimizer state at iteration 216 to ./logs\32k\D_19000.pth
458
+ 2023-02-07 19:48:27,521 32k INFO ====> Epoch: 216
459
+ 2023-02-07 19:53:09,847 32k INFO ====> Epoch: 217
460
+ 2023-02-07 19:57:51,694 32k INFO ====> Epoch: 218
461
+ 2023-02-07 19:59:01,703 32k INFO Train Epoch: 219 [18%]
462
+ 2023-02-07 19:59:01,704 32k INFO [2.5336737632751465, 2.1829769611358643, 12.086270332336426, 18.775772094726562, 0.6264281868934631, 19200, 9.731162741507607e-05]
463
+ 2023-02-07 20:02:29,398 32k INFO ====> Epoch: 219
464
+ 2023-02-07 20:07:18,152 32k INFO ====> Epoch: 220
465
+ 2023-02-07 20:09:32,665 32k INFO Train Epoch: 221 [45%]
466
+ 2023-02-07 20:09:32,666 32k INFO [2.55208683013916, 2.1489357948303223, 9.024528503417969, 12.344110488891602, 0.21028947830200195, 19400, 9.728730102871649e-05]
467
+ 2023-02-07 20:12:01,864 32k INFO ====> Epoch: 221
468
+ 2023-02-07 20:17:21,537 32k INFO ====> Epoch: 222
469
+ 2023-02-07 20:21:03,459 32k INFO Train Epoch: 223 [73%]
470
+ 2023-02-07 20:21:03,460 32k INFO [2.3493776321411133, 2.453810214996338, 10.56823444366455, 16.656709671020508, 0.6454640626907349, 19600, 9.726298072357337e-05]
471
+ 2023-02-07 20:22:13,720 32k INFO ====> Epoch: 223
472
+ 2023-02-07 20:27:00,082 32k INFO ====> Epoch: 224
473
+ 2023-02-07 20:31:49,803 32k INFO ====> Epoch: 225
474
+ 2023-02-07 20:32:13,328 32k INFO Train Epoch: 226 [0%]
475
+ 2023-02-07 20:32:13,329 32k INFO [2.4435298442840576, 2.232247829437256, 13.443621635437012, 17.813751220703125, 0.37300077080726624, 19800, 9.722651166481428e-05]
476
+ 2023-02-07 20:36:33,574 32k INFO ====> Epoch: 226
477
+ 2023-02-07 20:41:13,736 32k INFO ====> Epoch: 227
478
+ 2023-02-07 20:42:51,449 32k INFO Train Epoch: 228 [27%]
479
+ 2023-02-07 20:42:51,449 32k INFO [2.326092004776001, 2.2804453372955322, 11.041282653808594, 14.598264694213867, 0.5883763432502747, 20000, 9.720220655606233e-05]
480
+ 2023-02-07 20:42:55,938 32k INFO Saving model and optimizer state at iteration 228 to ./logs\32k\G_20000.pth
481
+ 2023-02-07 20:43:12,387 32k INFO Saving model and optimizer state at iteration 228 to ./logs\32k\D_20000.pth
482
+ 2023-02-07 20:46:16,362 32k INFO ====> Epoch: 228
483
+ 2023-02-07 20:48:28,613 32k INFO ====> Epoch: 229
484
+ 2023-02-07 20:49:53,099 32k INFO Train Epoch: 230 [55%]
485
+ 2023-02-07 20:49:53,100 32k INFO [2.47762393951416, 2.052753448486328, 9.87153434753418, 14.791862487792969, 0.4688096046447754, 20200, 9.717790752320778e-05]
486
+ 2023-02-07 20:50:42,766 32k INFO ====> Epoch: 230
487
+ 2023-02-07 20:53:21,743 32k INFO ====> Epoch: 231
488
+ 2023-02-07 20:55:22,768 32k INFO Train Epoch: 232 [82%]
489
+ 2023-02-07 20:55:22,768 32k INFO [2.5036845207214355, 2.3738698959350586, 12.126296997070312, 15.546420097351074, 0.8534035682678223, 20400, 9.715361456473177e-05]
490
+ 2023-02-07 20:55:38,686 32k INFO ====> Epoch: 232
491
+ 2023-02-07 20:57:30,092 32k INFO ====> Epoch: 233
492
+ 2023-02-07 20:59:21,099 32k INFO ====> Epoch: 234
493
+ 2023-02-07 20:59:52,118 32k INFO Train Epoch: 235 [9%]
494
+ 2023-02-07 20:59:52,118 32k INFO [2.664794445037842, 2.4800596237182617, 11.827136039733887, 18.01250457763672, 0.596635639667511, 20600, 9.711718651315591e-05]
495
+ 2023-02-07 21:01:16,479 32k INFO ====> Epoch: 235
496
+ 2023-02-07 21:03:07,297 32k INFO ====> Epoch: 236
497
+ 2023-02-07 21:04:02,363 32k INFO Train Epoch: 237 [36%]
498
+ 2023-02-07 21:04:02,364 32k INFO [2.4213716983795166, 2.2646498680114746, 11.426438331604004, 16.494094848632812, 1.1573891639709473, 20800, 9.709290873398365e-05]
499
+ 2023-02-07 21:04:58,950 32k INFO ====> Epoch: 237
500
+ 2023-02-07 21:06:50,639 32k INFO ====> Epoch: 238
501
+ 2023-02-07 21:08:10,151 32k INFO Train Epoch: 239 [64%]
502
+ 2023-02-07 21:08:10,151 32k INFO [2.4733920097351074, 2.557682991027832, 10.309959411621094, 17.214019775390625, 0.5586072206497192, 21000, 9.706863702387684e-05]
503
+ 2023-02-07 21:08:14,876 32k INFO Saving model and optimizer state at iteration 239 to ./logs\32k\G_21000.pth
504
+ 2023-02-07 21:08:30,907 32k INFO Saving model and optimizer state at iteration 239 to ./logs\32k\D_21000.pth
505
+ 2023-02-07 21:09:11,437 32k INFO ====> Epoch: 239
506
+ 2023-02-07 21:11:46,818 32k INFO ====> Epoch: 240
507
+ 2023-02-07 21:13:32,984 32k INFO Train Epoch: 241 [91%]
508
+ 2023-02-07 21:13:32,985 32k INFO [2.664260149002075, 2.0642004013061523, 10.666601181030273, 18.099609375, 0.731969952583313, 21200, 9.704437138131832e-05]
509
+ 2023-02-07 21:13:41,166 32k INFO ====> Epoch: 241
510
+ 2023-02-07 21:16:14,556 32k INFO ====> Epoch: 242
511
+ 2023-02-07 21:18:13,625 32k INFO ====> Epoch: 243
512
+ 2023-02-07 21:18:58,682 32k INFO Train Epoch: 244 [18%]
513
+ 2023-02-07 21:18:58,683 32k INFO [2.62353777885437, 2.2757506370544434, 9.752870559692383, 16.047285079956055, 0.5307028889656067, 21400, 9.700798429081568e-05]
514
+ 2023-02-07 21:20:17,243 32k INFO ====> Epoch: 244
515
+ 2023-02-07 21:22:09,430 32k INFO ====> Epoch: 245
516
+ 2023-02-07 21:23:12,158 32k INFO Train Epoch: 246 [45%]
517
+ 2023-02-07 21:23:12,158 32k INFO [2.376497745513916, 2.208364725112915, 10.225717544555664, 14.688340187072754, 0.6754216551780701, 21600, 9.698373381049272e-05]
518
+ 2023-02-07 21:24:00,240 32k INFO ====> Epoch: 246
519
+ 2023-02-07 21:25:50,642 32k INFO ====> Epoch: 247
520
+ 2023-02-07 21:27:17,392 32k INFO Train Epoch: 248 [73%]
521
+ 2023-02-07 21:27:17,393 32k INFO [2.221879243850708, 2.594529867172241, 9.369111061096191, 16.81258773803711, 1.0489362478256226, 21800, 9.695948939241093e-05]
522
+ 2023-02-07 21:27:41,245 32k INFO ====> Epoch: 248
523
+ 2023-02-07 21:29:31,673 32k INFO ====> Epoch: 249
524
+ 2023-02-07 21:31:21,916 32k INFO ====> Epoch: 250
525
+ 2023-02-07 21:31:44,284 32k INFO Train Epoch: 251 [0%]
526
+ 2023-02-07 21:31:44,285 32k INFO [2.5241942405700684, 2.3509469032287598, 12.539299011230469, 17.526670455932617, 0.7536979913711548, 22000, 9.692313412867544e-05]
527
+ 2023-02-07 21:31:48,766 32k INFO Saving model and optimizer state at iteration 251 to ./logs\32k\G_22000.pth
528
+ 2023-02-07 21:32:06,782 32k INFO Saving model and optimizer state at iteration 251 to ./logs\32k\D_22000.pth
529
+ 2023-02-07 21:33:38,411 32k INFO ====> Epoch: 251
530
+ 2023-02-07 21:35:28,990 32k INFO ====> Epoch: 252
531
+ 2023-02-07 21:36:15,628 32k INFO Train Epoch: 253 [27%]
532
+ 2023-02-07 21:36:15,629 32k INFO [2.3804070949554443, 2.359945058822632, 10.921401023864746, 13.723844528198242, 0.29848602414131165, 22200, 9.689890485956725e-05]
533
+ 2023-02-07 21:37:19,990 32k INFO ====> Epoch: 253
534
+ 2023-02-07 21:39:24,342 32k INFO ====> Epoch: 254
535
+ 2023-02-07 21:41:02,731 32k INFO Train Epoch: 255 [55%]
536
+ 2023-02-07 21:41:02,731 32k INFO [2.3381145000457764, 2.2890772819519043, 9.941591262817383, 14.795817375183105, 0.8626958131790161, 22400, 9.687468164739773e-05]
537
+ 2023-02-07 21:41:58,308 32k INFO ====> Epoch: 255
538
+ 2023-02-07 21:44:30,929 32k INFO ====> Epoch: 256
539
+ 2023-02-07 21:46:25,585 32k INFO Train Epoch: 257 [82%]
540
+ 2023-02-07 21:46:25,586 32k INFO [2.6762328147888184, 2.2302746772766113, 10.52723503112793, 12.506368637084961, 1.073678970336914, 22600, 9.685046449065278e-05]
+ 2023-02-07 21:46:42,883 32k INFO ====> Epoch: 257
+ 2023-02-07 21:48:33,265 32k INFO ====> Epoch: 258
+ 2023-02-07 21:50:23,706 32k INFO ====> Epoch: 259
+ 2023-02-07 21:50:54,245 32k INFO Train Epoch: 260 [9%]
+ 2023-02-07 21:50:54,245 32k INFO [2.3950483798980713, 2.347141742706299, 13.781299591064453, 18.84493637084961, 0.526691198348999, 22800, 9.681415010614512e-05]
+ 2023-02-07 21:52:14,694 32k INFO ====> Epoch: 260
+ 2023-02-07 21:54:05,225 32k INFO ====> Epoch: 261
+ 2023-02-07 21:54:59,831 32k INFO Train Epoch: 262 [36%]
+ 2023-02-07 21:54:59,831 32k INFO [2.555619716644287, 2.2212436199188232, 9.575003623962402, 16.177812576293945, 0.7556395530700684, 23000, 9.678994808133967e-05]
+ 2023-02-07 21:55:04,312 32k INFO Saving model and optimizer state at iteration 262 to ./logs\32k\G_23000.pth
+ 2023-02-07 21:55:22,224 32k INFO Saving model and optimizer state at iteration 262 to ./logs\32k\D_23000.pth
+ 2023-02-07 21:56:21,579 32k INFO ====> Epoch: 262
+ 2023-02-07 21:58:12,231 32k INFO ====> Epoch: 263
+ 2023-02-07 21:59:31,353 32k INFO Train Epoch: 264 [64%]
+ 2023-02-07 21:59:31,354 32k INFO [2.291395664215088, 2.593254566192627, 9.18558406829834, 14.170002937316895, 0.9322950839996338, 23200, 9.676575210666227e-05]
+ 2023-02-07 22:00:03,624 32k INFO ====> Epoch: 264
+ 2023-02-07 22:01:54,230 32k INFO ====> Epoch: 265
+ 2023-02-07 22:04:00,545 32k INFO Train Epoch: 266 [91%]
+ 2023-02-07 22:04:00,547 32k INFO [2.5157217979431152, 2.2893288135528564, 7.338489532470703, 14.968390464782715, 0.9041028618812561, 23400, 9.674156218060047e-05]
+ 2023-02-07 22:04:12,431 32k INFO ====> Epoch: 266
+ 2023-02-07 22:06:42,140 32k INFO ====> Epoch: 267
+ 2023-02-07 22:08:39,638 32k INFO ====> Epoch: 268
+ 2023-02-07 22:09:18,181 32k INFO Train Epoch: 269 [18%]
+ 2023-02-07 22:09:18,182 32k INFO [2.585972785949707, 2.461054563522339, 12.841654777526855, 18.517820358276367, 0.6318978071212769, 23600, 9.670528862935451e-05]
+ 2023-02-07 22:10:30,561 32k INFO ====> Epoch: 269
+ 2023-02-07 22:12:21,413 32k INFO ====> Epoch: 270
+ 2023-02-07 22:13:24,801 32k INFO Train Epoch: 271 [45%]
+ 2023-02-07 22:13:24,802 32k INFO [2.421670913696289, 2.4571542739868164, 13.116412162780762, 15.18135929107666, 0.4794858396053314, 23800, 9.668111381821731e-05]
+ 2023-02-07 22:14:19,598 32k INFO ====> Epoch: 271
+ 2023-02-07 22:16:14,524 32k INFO ====> Epoch: 272
+ 2023-02-07 22:17:44,409 32k INFO Train Epoch: 273 [73%]
+ 2023-02-07 22:17:44,410 32k INFO [2.4379258155822754, 2.2511990070343018, 12.288382530212402, 17.285614013671875, 0.5320841073989868, 24000, 9.665694505040515e-05]
+ 2023-02-07 22:17:48,915 32k INFO Saving model and optimizer state at iteration 273 to ./logs\32k\G_24000.pth
+ 2023-02-07 22:18:05,358 32k INFO Saving model and optimizer state at iteration 273 to ./logs\32k\D_24000.pth
+ 2023-02-07 22:18:32,487 32k INFO ====> Epoch: 273
+ 2023-02-07 22:20:23,184 32k INFO ====> Epoch: 274
+ 2023-02-07 22:22:13,578 32k INFO ====> Epoch: 275
+ 2023-02-07 22:22:35,919 32k INFO Train Epoch: 276 [0%]
+ 2023-02-07 22:22:35,919 32k INFO [2.4696168899536133, 2.276357889175415, 15.216888427734375, 18.051366806030273, 0.7219808101654053, 24200, 9.662070322661676e-05]
+ 2023-02-07 22:24:04,182 32k INFO ====> Epoch: 276
+ 2023-02-07 22:25:54,360 32k INFO ====> Epoch: 277
+ 2023-02-07 22:26:40,891 32k INFO Train Epoch: 278 [27%]
+ 2023-02-07 22:26:40,891 32k INFO [2.361176013946533, 2.4721574783325195, 12.992560386657715, 16.2576847076416, 0.4791528880596161, 24400, 9.659654956050859e-05]
+ 2023-02-07 22:27:45,265 32k INFO ====> Epoch: 278
+ 2023-02-07 22:29:35,652 32k INFO ====> Epoch: 279
+ 2023-02-07 22:30:46,343 32k INFO Train Epoch: 280 [55%]
+ 2023-02-07 22:30:46,344 32k INFO [2.398617744445801, 2.2190635204315186, 9.964317321777344, 16.1744384765625, 0.6419702172279358, 24600, 9.657240193243954e-05]
+ 2023-02-07 22:31:26,369 32k INFO ====> Epoch: 280
+ 2023-02-07 22:33:16,339 32k INFO ====> Epoch: 281
+ 2023-02-07 22:34:51,532 32k INFO Train Epoch: 282 [82%]
+ 2023-02-07 22:34:51,532 32k INFO [2.4355669021606445, 2.365387201309204, 14.452692985534668, 18.16908073425293, 0.7730005979537964, 24800, 9.65482603409002e-05]
+ 2023-02-07 22:35:07,353 32k INFO ====> Epoch: 282
+ 2023-02-07 22:37:00,123 32k INFO ====> Epoch: 283
+ 2023-02-07 22:38:59,381 32k INFO ====> Epoch: 284
+ 2023-02-07 22:39:31,093 32k INFO Train Epoch: 285 [9%]
+ 2023-02-07 22:39:31,093 32k INFO [2.555506944656372, 2.110740900039673, 8.73676872253418, 14.878194808959961, 0.5724442601203918, 25000, 9.651205926878348e-05]
+ 2023-02-07 22:39:35,607 32k INFO Saving model and optimizer state at iteration 285 to ./logs\32k\G_25000.pth
+ 2023-02-07 22:39:50,520 32k INFO Saving model and optimizer state at iteration 285 to ./logs\32k\D_25000.pth
+ 2023-02-07 22:41:21,915 32k INFO ====> Epoch: 285
+ 2023-02-07 22:43:21,782 32k INFO ====> Epoch: 286
+ 2023-02-07 22:44:19,787 32k INFO Train Epoch: 287 [36%]
+ 2023-02-07 22:44:19,788 32k INFO [2.4445996284484863, 2.372640371322632, 12.413557052612305, 17.518985748291016, 0.5160245895385742, 25200, 9.64879327619672e-05]
+ 2023-02-07 22:45:21,196 32k INFO ====> Epoch: 287
+ 2023-02-07 22:47:19,661 32k INFO ====> Epoch: 288
+ 2023-02-07 22:48:43,726 32k INFO Train Epoch: 289 [64%]
+ 2023-02-07 22:48:43,726 32k INFO [2.4691805839538574, 2.2594118118286133, 12.230856895446777, 14.857041358947754, 0.8859390616416931, 25400, 9.646381228640066e-05]
+ 2023-02-07 22:49:18,868 32k INFO ====> Epoch: 289
+ 2023-02-07 22:51:17,911 32k INFO ====> Epoch: 290
+ 2023-02-07 22:53:08,763 32k INFO Train Epoch: 291 [91%]
+ 2023-02-07 22:53:08,763 32k INFO [2.9651851654052734, 1.7647536993026733, 4.42325496673584, 10.037627220153809, 0.7796562910079956, 25600, 9.643969784057613e-05]
+ 2023-02-07 22:53:17,282 32k INFO ====> Epoch: 291
+ 2023-02-07 22:55:35,666 32k INFO ====> Epoch: 292
+ 2023-02-07 22:57:30,190 32k INFO ====> Epoch: 293
+ 2023-02-07 22:58:09,441 32k INFO Train Epoch: 294 [18%]
+ 2023-02-07 22:58:09,442 32k INFO [2.373373508453369, 2.252485990524292, 12.78847885131836, 16.72306251525879, 0.606884241104126, 25800, 9.640353747430838e-05]
+ 2023-02-07 22:59:31,654 32k INFO ====> Epoch: 294
+ 2023-02-07 23:02:17,308 32k INFO ====> Epoch: 295
+ 2023-02-07 23:03:31,257 32k INFO Train Epoch: 296 [45%]
+ 2023-02-07 23:03:31,274 32k INFO [2.303215503692627, 2.5511465072631836, 13.934648513793945, 18.49505043029785, 0.8854227066040039, 26000, 9.637943809624507e-05]
+ 2023-02-07 23:03:36,038 32k INFO Saving model and optimizer state at iteration 296 to ./logs\32k\G_26000.pth
+ 2023-02-07 23:03:54,027 32k INFO Saving model and optimizer state at iteration 296 to ./logs\32k\D_26000.pth
+ 2023-02-07 23:04:45,385 32k INFO ====> Epoch: 296
+ 2023-02-07 23:06:40,912 32k INFO ====> Epoch: 297
+ 2023-02-07 23:08:12,513 32k INFO Train Epoch: 298 [73%]
+ 2023-02-07 23:08:12,513 32k INFO [2.4901669025421143, 2.450923442840576, 12.483206748962402, 15.857170104980469, 0.4418282210826874, 26200, 9.635534474264972e-05]
+ 2023-02-07 23:08:37,120 32k INFO ====> Epoch: 298
+ 2023-02-07 23:10:31,324 32k INFO ====> Epoch: 299
+ 2023-02-07 23:12:24,564 32k INFO ====> Epoch: 300
+ 2023-02-07 23:12:47,305 32k INFO Train Epoch: 301 [0%]
+ 2023-02-07 23:12:47,305 32k INFO [2.5084266662597656, 2.153733730316162, 12.863175392150879, 16.530397415161133, 0.8135973811149597, 26400, 9.631921600483981e-05]
+ 2023-02-07 23:14:17,760 32k INFO ====> Epoch: 301
+ 2023-02-07 23:16:11,232 32k INFO ====> Epoch: 302
+ 2023-02-07 23:16:58,909 32k INFO Train Epoch: 303 [27%]
+ 2023-02-07 23:16:58,910 32k INFO [2.3788869380950928, 2.2787249088287354, 11.330511093139648, 14.017260551452637, 0.4352797567844391, 26600, 9.629513770582634e-05]
+ 2023-02-07 23:18:07,967 32k INFO ====> Epoch: 303
+ 2023-02-07 23:20:14,895 32k INFO ====> Epoch: 304
+ 2023-02-07 23:21:37,923 32k INFO Train Epoch: 305 [55%]
+ 2023-02-07 23:21:37,923 32k INFO [2.2762134075164795, 2.4930131435394287, 11.308450698852539, 16.77684783935547, 0.5349023342132568, 26800, 9.627106542601141e-05]
+ 2023-02-07 23:22:23,260 32k INFO ====> Epoch: 305
+ 2023-02-07 23:24:24,074 32k INFO ====> Epoch: 306
+ 2023-02-07 23:26:03,684 32k INFO Train Epoch: 307 [82%]
+ 2023-02-07 23:26:03,685 32k INFO [2.4888241291046143, 2.198373794555664, 12.962141990661621, 16.00810432434082, 1.0677847862243652, 27000, 9.62469991638903e-05]
+ 2023-02-07 23:26:08,344 32k INFO Saving model and optimizer state at iteration 307 to ./logs\32k\G_27000.pth
+ 2023-02-07 23:26:26,856 32k INFO Saving model and optimizer state at iteration 307 to ./logs\32k\D_27000.pth
+ 2023-02-07 23:26:46,493 32k INFO ====> Epoch: 307
+ 2023-02-07 23:28:59,272 32k INFO ====> Epoch: 308
+ 2023-02-07 23:33:03,919 32k INFO ====> Epoch: 309
+ 2023-02-07 23:33:46,219 32k INFO Train Epoch: 310 [9%]
+ 2023-02-07 23:33:46,220 32k INFO [2.477017879486084, 2.274493455886841, 9.995453834533691, 15.900580406188965, 0.2754864990711212, 27200, 9.621091105059392e-05]
+ 2023-02-07 23:35:42,296 32k INFO ====> Epoch: 310
+ 2023-02-07 23:38:21,168 32k INFO ====> Epoch: 311
+ 2023-02-07 23:39:25,317 32k INFO Train Epoch: 312 [36%]
+ 2023-02-07 23:39:25,317 32k INFO [2.5796499252319336, 2.222482204437256, 10.898737907409668, 16.291173934936523, 1.0068455934524536, 27400, 9.618685982612675e-05]
+ 2023-02-07 23:40:36,498 32k INFO ====> Epoch: 312
+ 2023-02-07 23:42:49,333 32k INFO ====> Epoch: 313
+ 2023-02-07 23:44:13,367 32k INFO Train Epoch: 314 [64%]
+ 2023-02-07 23:44:13,368 32k INFO [2.4441633224487305, 2.428528308868408, 11.276105880737305, 15.440587997436523, 1.0377545356750488, 27600, 9.61628146140899e-05]
+ 2023-02-07 23:44:47,378 32k INFO ====> Epoch: 314
+ 2023-02-07 23:46:45,485 32k INFO ====> Epoch: 315
+ 2023-02-07 23:48:34,997 32k INFO Train Epoch: 316 [91%]
+ 2023-02-07 23:48:34,997 32k INFO [2.5431222915649414, 2.133033275604248, 7.831211566925049, 12.808097839355469, 0.9566836953163147, 27800, 9.613877541298036e-05]
+ 2023-02-07 23:48:43,380 32k INFO ====> Epoch: 316
+ 2023-02-07 23:50:52,321 32k INFO ====> Epoch: 317
+ 2023-02-07 23:52:50,708 32k INFO ====> Epoch: 318
+ 2023-02-07 23:53:47,432 32k INFO Train Epoch: 319 [18%]
+ 2023-02-07 23:53:47,433 32k INFO [2.5341179370880127, 2.2451441287994385, 11.315544128417969, 16.780118942260742, 0.7941577434539795, 28000, 9.61027278785178e-05]
+ 2023-02-07 23:53:54,434 32k INFO Saving model and optimizer state at iteration 319 to ./logs\32k\G_28000.pth
+ 2023-02-07 23:54:12,087 32k INFO Saving model and optimizer state at iteration 319 to ./logs\32k\D_28000.pth
+ 2023-02-07 23:55:33,831 32k INFO ====> Epoch: 319
+ 2023-02-07 23:57:50,809 32k INFO ====> Epoch: 320
+ 2023-02-07 23:58:57,507 32k INFO Train Epoch: 321 [45%]
+ 2023-02-07 23:58:57,508 32k INFO [2.6975388526916504, 2.2120161056518555, 11.405308723449707, 12.978477478027344, 0.8090832233428955, 28200, 9.60787036981533e-05]
+ 2023-02-07 23:59:49,075 32k INFO ====> Epoch: 321
+ 2023-02-08 00:02:17,117 32k INFO ====> Epoch: 322
+ 2023-02-08 00:03:48,979 32k INFO Train Epoch: 323 [73%]
+ 2023-02-08 00:03:48,980 32k INFO [2.5260772705078125, 2.178119421005249, 8.271916389465332, 15.082479476928711, 0.5642960071563721, 28400, 9.60546855234585e-05]
+ 2023-02-08 00:04:14,283 32k INFO ====> Epoch: 323
+ 2023-02-08 00:06:10,300 32k INFO ====> Epoch: 324
+ 2023-02-08 00:08:28,132 32k INFO ====> Epoch: 325