Itoifi committed on
Commit
049a091
1 Parent(s): 7c2b8c3

Upload 15 files

32k-resona/D_12000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9905418633db4c95b5fecae20a71aba84cd101581f3723ce58bb9f79cb736e82
+ size 561098185
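Each .pth entry in this commit is stored as a Git LFS pointer rather than the raw bytes: the pointer records only the spec version, the SHA-256 of the real file, and its size. A minimal sketch of checking a downloaded checkpoint against its pointer, using just the standard library (the pointer-parsing helper and the file paths are illustrative, not part of any library):

import hashlib

def parse_lfs_pointer(path):
    # Read the "key value" lines of a Git LFS pointer file into a dict.
    fields = {}
    with open(path, "r", encoding="utf-8") as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    return fields

def verify(pointer_path, blob_path):
    fields = parse_lfs_pointer(pointer_path)
    expected_oid = fields["oid"].split(":", 1)[1]   # "sha256:<hex>" -> "<hex>"
    expected_size = int(fields["size"])
    h = hashlib.sha256()
    size = 0
    with open(blob_path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
            h.update(chunk)
            size += len(chunk)
    return h.hexdigest() == expected_oid and size == expected_size

# e.g. verify("D_12000.pth.pointer", "32k-resona/D_12000.pth")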
32k-resona/D_18000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:86cd4c4f522ed3e48c59868a0d2b9bde69634b001609a3cc5e520871a7970068
+ size 561098185
32k-resona/D_22000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bc80058fb8d7752f49983d3fa793754286d62159bcd75dcf41bcdbbc464bb83e
+ size 561098185
32k-resona/D_6000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dd9817641048f7f822eb9cdf7a5ed523a3e357ff85ecfba446a94b6ea98ebfd5
+ size 561098185
32k-resona/G_12000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:76508e3bb505cab4cb71c7edcc6ac507afaef320a7cbbb757cbe9b7b9f86f226
+ size 699505437
32k-resona/G_18000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a5d663c163db166c5a7f0a26929c4870c00b7ac4a6107d0f9870cc4bb4d22a61
+ size 699505437
32k-resona/G_22000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5556b132b0f04d2e864b4da4118e722117be4623382b48b5b03e41d174a8eb80
+ size 699505437
32k-resona/G_6000.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:52b23e33bccbd967d6c78ac7d8cc226927e6de1b638fbe0334aaf60188a4ba27
+ size 699505437
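The G_*.pth files are generator checkpoints and the D_*.pth files the matching discriminator checkpoints; the train.log below shows them being written in pairs every 1000 iterations. A quick way to inspect one, assuming the usual so-vits-svc/VITS checkpoint layout of a dict holding 'model', 'optimizer', 'learning_rate' and 'iteration' (that layout is an assumption about the training code, not something the pointer files guarantee):

import torch

# map_location="cpu" lets the inspection run without a GPU
ckpt = torch.load("32k-resona/G_22000.pth", map_location="cpu")
print(sorted(ckpt.keys()))    # assumed: ['iteration', 'learning_rate', 'model', 'optimizer']
print(ckpt.get("iteration"))  # iteration the checkpoint was saved at
state_dict = ckpt["model"]    # parameter tensors keyed by module path
print(len(state_dict), "tensors")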
32k-resona/config.json ADDED
@@ -0,0 +1,90 @@
+ {
+   "train": {
+     "log_interval": 200,
+     "eval_interval": 1000,
+     "seed": 1234,
+     "epochs": 10000,
+     "learning_rate": 0.0001,
+     "betas": [
+       0.8,
+       0.99
+     ],
+     "eps": 1e-09,
+     "batch_size": 6,
+     "fp16_run": false,
+     "lr_decay": 0.999875,
+     "segment_size": 17920,
+     "init_lr_ratio": 1,
+     "warmup_epochs": 0,
+     "c_mel": 45,
+     "c_kl": 1.0,
+     "use_sr": true,
+     "max_speclen": 384,
+     "port": "8001"
+   },
+   "data": {
+     "training_files": "filelists/train.txt",
+     "validation_files": "filelists/val.txt",
+     "max_wav_value": 32768.0,
+     "sampling_rate": 32000,
+     "filter_length": 1280,
+     "hop_length": 320,
+     "win_length": 1280,
+     "n_mel_channels": 80,
+     "mel_fmin": 0.0,
+     "mel_fmax": null
+   },
+   "model": {
+     "inter_channels": 192,
+     "hidden_channels": 192,
+     "filter_channels": 768,
+     "n_heads": 2,
+     "n_layers": 6,
+     "kernel_size": 3,
+     "p_dropout": 0.1,
+     "resblock": "1",
+     "resblock_kernel_sizes": [
+       3,
+       7,
+       11
+     ],
+     "resblock_dilation_sizes": [
+       [
+         1,
+         3,
+         5
+       ],
+       [
+         1,
+         3,
+         5
+       ],
+       [
+         1,
+         3,
+         5
+       ]
+     ],
+     "upsample_rates": [
+       10,
+       8,
+       2,
+       2
+     ],
+     "upsample_initial_channel": 512,
+     "upsample_kernel_sizes": [
+       16,
+       16,
+       4,
+       4
+     ],
+     "n_layers_q": 3,
+     "use_spectral_norm": false,
+     "gin_channels": 256,
+     "ssl_dim": 256,
+     "n_speakers": 2
+   },
+   "spk": {
+     "resona": 0
+   }
+ }
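The config ties the vocoder geometry to the signal parameters: the product of upsample_rates (10 * 8 * 2 * 2 = 320) must equal hop_length, so the decoder turns one spectrogram frame into exactly one hop of audio, and segment_size (17920 samples) is exactly 56 hops. A small sketch that loads this config and checks those invariants with the standard library (the relative file path is the one in this commit):

import json
from math import prod

with open("32k-resona/config.json", "r", encoding="utf-8") as f:
    cfg = json.load(f)

hop = cfg["data"]["hop_length"]                 # 320
rates = cfg["model"]["upsample_rates"]          # [10, 8, 2, 2]
assert prod(rates) == hop                       # one frame -> one hop of samples
assert cfg["train"]["segment_size"] % hop == 0  # 17920 / 320 = 56 frames per segment
print(cfg["data"]["sampling_rate"] / hop, "frames per second")  # 32000 / 320 = 100.0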
32k-resona/eval/events.out.tfevents.1675860170.DESKTOP-P582Q00.33124.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e1833a1cd7ffab385b95c59c4d3bf69622c2619cbe7f94d313835ec127eb8711
+ size 40
32k-resona/eval/events.out.tfevents.1675860204.DESKTOP-P582Q00.65440.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:563a09f0d78145e74c451f52d4221ad995ee3d1507782512b88b0adf12db3a58
+ size 40
32k-resona/eval/events.out.tfevents.1675860527.DESKTOP-P582Q00.41592.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c103690457aebf3146ded586cac9807047877256f6511b2a5e303a8f4d327de4
+ size 40
32k-resona/eval/events.out.tfevents.1675861101.DESKTOP-P582Q00.52652.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fe42f99b752843c26334e09245b6933ff7ec46558e0dfd8519ec027d3a7fed9a
+ size 40
32k-resona/eval/events.out.tfevents.1675861179.DESKTOP-P582Q00.8620.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b4454265e7faaa71a4298549b3af15dbf418321f829522bf08a56f09efe5adf6
+ size 53715382
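The events.out.tfevents.* files are TensorBoard logs; the 40-byte ones are runs that were started and abandoned, and only the last one carries data. They can be read back without launching TensorBoard via the EventAccumulator API that ships with the tensorboard package; a minimal sketch (the scalar tag name in the comment is a guess at what this trainer logs, not read from the file):

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

ea = EventAccumulator("32k-resona/eval")  # accepts a directory of event files
ea.Reload()                               # parse everything currently on disk
print(ea.Tags())                          # available scalar/image/audio tags
# each scalar event has .wall_time, .step and .value, e.g.:
# for e in ea.Scalars("loss/g/total"): print(e.step, e.value)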
32k-resona/train.log ADDED
@@ -0,0 +1,608 @@
+ 2023-02-08 23:42:36,711 32k INFO {'train': {'log_interval': 200, 'eval_interval': 1000, 'seed': 1234, 'epochs': 10000, 'learning_rate': 0.0001, 'betas': [0.8, 0.99], 'eps': 1e-09, 'batch_size': 6, 'fp16_run': False, 'lr_decay': 0.999875, 'segment_size': 17920, 'init_lr_ratio': 1, 'warmup_epochs': 0, 'c_mel': 45, 'c_kl': 1.0, 'use_sr': True, 'max_speclen': 384, 'port': '8001'}, 'data': {'training_files': 'filelists/train.txt', 'validation_files': 'filelists/val.txt', 'max_wav_value': 32768.0, 'sampling_rate': 32000, 'filter_length': 1280, 'hop_length': 320, 'win_length': 1280, 'n_mel_channels': 80, 'mel_fmin': 0.0, 'mel_fmax': None}, 'model': {'inter_channels': 192, 'hidden_channels': 192, 'filter_channels': 768, 'n_heads': 2, 'n_layers': 6, 'kernel_size': 3, 'p_dropout': 0.1, 'resblock': '1', 'resblock_kernel_sizes': [3, 7, 11], 'resblock_dilation_sizes': [[1, 3, 5], [1, 3, 5], [1, 3, 5]], 'upsample_rates': [10, 8, 2, 2], 'upsample_initial_channel': 512, 'upsample_kernel_sizes': [16, 16, 4, 4], 'n_layers_q': 3, 'use_spectral_norm': False, 'gin_channels': 256, 'ssl_dim': 256, 'n_speakers': 2}, 'spk': {'resona': 0}, 'model_dir': './logs\\32k'}
+ 2023-02-08 23:43:22,288 32k INFO {'train': {'log_interval': 200, 'eval_interval': 1000, 'seed': 1234, 'epochs': 10000, 'learning_rate': 0.0001, 'betas': [0.8, 0.99], 'eps': 1e-09, 'batch_size': 6, 'fp16_run': False, 'lr_decay': 0.999875, 'segment_size': 17920, 'init_lr_ratio': 1, 'warmup_epochs': 0, 'c_mel': 45, 'c_kl': 1.0, 'use_sr': True, 'max_speclen': 384, 'port': '8001'}, 'data': {'training_files': 'filelists/train.txt', 'validation_files': 'filelists/val.txt', 'max_wav_value': 32768.0, 'sampling_rate': 32000, 'filter_length': 1280, 'hop_length': 320, 'win_length': 1280, 'n_mel_channels': 80, 'mel_fmin': 0.0, 'mel_fmax': None}, 'model': {'inter_channels': 192, 'hidden_channels': 192, 'filter_channels': 768, 'n_heads': 2, 'n_layers': 6, 'kernel_size': 3, 'p_dropout': 0.1, 'resblock': '1', 'resblock_kernel_sizes': [3, 7, 11], 'resblock_dilation_sizes': [[1, 3, 5], [1, 3, 5], [1, 3, 5]], 'upsample_rates': [10, 8, 2, 2], 'upsample_initial_channel': 512, 'upsample_kernel_sizes': [16, 16, 4, 4], 'n_layers_q': 3, 'use_spectral_norm': False, 'gin_channels': 256, 'ssl_dim': 256, 'n_speakers': 2}, 'spk': {'resona': 0}, 'model_dir': './logs\\32k'}
+ 2023-02-08 23:48:45,714 32k INFO {'train': {'log_interval': 200, 'eval_interval': 1000, 'seed': 1234, 'epochs': 10000, 'learning_rate': 0.0001, 'betas': [0.8, 0.99], 'eps': 1e-09, 'batch_size': 12, 'fp16_run': False, 'lr_decay': 0.999875, 'segment_size': 17920, 'init_lr_ratio': 1, 'warmup_epochs': 0, 'c_mel': 45, 'c_kl': 1.0, 'use_sr': True, 'max_speclen': 384, 'port': '8001'}, 'data': {'training_files': 'filelists/train.txt', 'validation_files': 'filelists/val.txt', 'max_wav_value': 32768.0, 'sampling_rate': 32000, 'filter_length': 1280, 'hop_length': 320, 'win_length': 1280, 'n_mel_channels': 80, 'mel_fmin': 0.0, 'mel_fmax': None}, 'model': {'inter_channels': 192, 'hidden_channels': 192, 'filter_channels': 768, 'n_heads': 2, 'n_layers': 6, 'kernel_size': 3, 'p_dropout': 0.1, 'resblock': '1', 'resblock_kernel_sizes': [3, 7, 11], 'resblock_dilation_sizes': [[1, 3, 5], [1, 3, 5], [1, 3, 5]], 'upsample_rates': [10, 8, 2, 2], 'upsample_initial_channel': 512, 'upsample_kernel_sizes': [16, 16, 4, 4], 'n_layers_q': 3, 'use_spectral_norm': False, 'gin_channels': 256, 'ssl_dim': 256, 'n_speakers': 2}, 'spk': {'resona': 0}, 'model_dir': './logs\\32k'}
+ 2023-02-08 23:48:54,959 32k INFO Loaded checkpoint './logs\32k\G_0.pth' (iteration 1)
+ 2023-02-08 23:48:58,405 32k INFO Loaded checkpoint './logs\32k\D_0.pth' (iteration 1)
+ 2023-02-08 23:58:19,270 32k INFO {'train': {'log_interval': 200, 'eval_interval': 1000, 'seed': 1234, 'epochs': 10000, 'learning_rate': 0.0001, 'betas': [0.8, 0.99], 'eps': 1e-09, 'batch_size': 12, 'fp16_run': False, 'lr_decay': 0.999875, 'segment_size': 17920, 'init_lr_ratio': 1, 'warmup_epochs': 0, 'c_mel': 45, 'c_kl': 1.0, 'use_sr': True, 'max_speclen': 384, 'port': '8001'}, 'data': {'training_files': 'filelists/train.txt', 'validation_files': 'filelists/val.txt', 'max_wav_value': 32768.0, 'sampling_rate': 32000, 'filter_length': 1280, 'hop_length': 320, 'win_length': 1280, 'n_mel_channels': 80, 'mel_fmin': 0.0, 'mel_fmax': None}, 'model': {'inter_channels': 192, 'hidden_channels': 192, 'filter_channels': 768, 'n_heads': 2, 'n_layers': 6, 'kernel_size': 3, 'p_dropout': 0.1, 'resblock': '1', 'resblock_kernel_sizes': [3, 7, 11], 'resblock_dilation_sizes': [[1, 3, 5], [1, 3, 5], [1, 3, 5]], 'upsample_rates': [10, 8, 2, 2], 'upsample_initial_channel': 512, 'upsample_kernel_sizes': [16, 16, 4, 4], 'n_layers_q': 3, 'use_spectral_norm': False, 'gin_channels': 256, 'ssl_dim': 256, 'n_speakers': 2}, 'spk': {'resona': 0}, 'model_dir': './logs\\32k'}
+ 2023-02-08 23:58:24,560 32k INFO Loaded checkpoint './logs\32k\G_0.pth' (iteration 1)
+ 2023-02-08 23:58:24,968 32k INFO Loaded checkpoint './logs\32k\D_0.pth' (iteration 1)
+ 2023-02-08 23:59:37,724 32k INFO {'train': {'log_interval': 200, 'eval_interval': 1000, 'seed': 1234, 'epochs': 10000, 'learning_rate': 0.0001, 'betas': [0.8, 0.99], 'eps': 1e-09, 'batch_size': 6, 'fp16_run': False, 'lr_decay': 0.999875, 'segment_size': 17920, 'init_lr_ratio': 1, 'warmup_epochs': 0, 'c_mel': 45, 'c_kl': 1.0, 'use_sr': True, 'max_speclen': 384, 'port': '8001'}, 'data': {'training_files': 'filelists/train.txt', 'validation_files': 'filelists/val.txt', 'max_wav_value': 32768.0, 'sampling_rate': 32000, 'filter_length': 1280, 'hop_length': 320, 'win_length': 1280, 'n_mel_channels': 80, 'mel_fmin': 0.0, 'mel_fmax': None}, 'model': {'inter_channels': 192, 'hidden_channels': 192, 'filter_channels': 768, 'n_heads': 2, 'n_layers': 6, 'kernel_size': 3, 'p_dropout': 0.1, 'resblock': '1', 'resblock_kernel_sizes': [3, 7, 11], 'resblock_dilation_sizes': [[1, 3, 5], [1, 3, 5], [1, 3, 5]], 'upsample_rates': [10, 8, 2, 2], 'upsample_initial_channel': 512, 'upsample_kernel_sizes': [16, 16, 4, 4], 'n_layers_q': 3, 'use_spectral_norm': False, 'gin_channels': 256, 'ssl_dim': 256, 'n_speakers': 2}, 'spk': {'resona': 0}, 'model_dir': './logs\\32k'}
+ 2023-02-08 23:59:42,440 32k INFO Loaded checkpoint './logs\32k\G_0.pth' (iteration 1)
+ 2023-02-08 23:59:42,831 32k INFO Loaded checkpoint './logs\32k\D_0.pth' (iteration 1)
+ 2023-02-09 00:00:09,549 32k INFO Train Epoch: 1 [0%]
+ 2023-02-09 00:00:09,550 32k INFO [2.2538223266601562, 2.6597347259521484, 11.763463020324707, 45.49671173095703, 11.207155227661133, 0, 0.0001]
+ 2023-02-09 00:00:15,641 32k INFO Saving model and optimizer state at iteration 1 to ./logs\32k\G_0.pth
+ 2023-02-09 00:00:34,164 32k INFO Saving model and optimizer state at iteration 1 to ./logs\32k\D_0.pth
+ 2023-02-09 00:01:33,109 32k INFO ====> Epoch: 1
+ 2023-02-09 00:02:50,380 32k INFO ====> Epoch: 2
+ 2023-02-09 00:04:06,903 32k INFO Train Epoch: 3 [99%]
+ 2023-02-09 00:04:06,904 32k INFO [2.4401040077209473, 2.5799942016601562, 9.811338424682617, 22.839824676513672, 1.509987711906433, 200, 9.99750015625e-05]
+ 2023-02-09 00:04:07,676 32k INFO ====> Epoch: 3
+ 2023-02-09 00:05:24,690 32k INFO ====> Epoch: 4
+ 2023-02-09 00:06:41,699 32k INFO ====> Epoch: 5
+ 2023-02-09 00:07:57,478 32k INFO Train Epoch: 6 [97%]
+ 2023-02-09 00:07:57,479 32k INFO [2.500568389892578, 2.042257785797119, 10.643875122070312, 17.175233840942383, 1.2767752408981323, 400, 9.993751562304699e-05]
+ 2023-02-09 00:07:59,058 32k INFO ====> Epoch: 6
+ 2023-02-09 00:09:16,169 32k INFO ====> Epoch: 7
+ 2023-02-09 00:10:33,248 32k INFO ====> Epoch: 8
+ 2023-02-09 00:11:48,262 32k INFO Train Epoch: 9 [96%]
+ 2023-02-09 00:11:48,262 32k INFO [2.4632694721221924, 2.3801445960998535, 6.719289779663086, 15.820771217346191, 1.1217436790466309, 600, 9.990004373906418e-05]
+ 2023-02-09 00:11:50,769 32k INFO ====> Epoch: 9
+ 2023-02-09 00:13:08,802 32k INFO ====> Epoch: 10
+ 2023-02-09 00:14:26,016 32k INFO ====> Epoch: 11
+ 2023-02-09 00:15:40,270 32k INFO Train Epoch: 12 [94%]
+ 2023-02-09 00:15:40,271 32k INFO [2.455298662185669, 2.3402817249298096, 9.666932106018066, 20.384544372558594, 0.6758424043655396, 800, 9.986258590528146e-05]
+ 2023-02-09 00:15:43,517 32k INFO ====> Epoch: 12
+ 2023-02-09 00:17:00,741 32k INFO ====> Epoch: 13
+ 2023-02-09 00:18:18,179 32k INFO ====> Epoch: 14
+ 2023-02-09 00:19:31,643 32k INFO Train Epoch: 15 [93%]
+ 2023-02-09 00:19:31,643 32k INFO [2.623318672180176, 2.104186534881592, 8.998331069946289, 18.40737533569336, 1.2988810539245605, 1000, 9.982514211643064e-05]
+ 2023-02-09 00:19:36,017 32k INFO Saving model and optimizer state at iteration 15 to ./logs\32k\G_1000.pth
+ 2023-02-09 00:19:56,035 32k INFO Saving model and optimizer state at iteration 15 to ./logs\32k\D_1000.pth
+ 2023-02-09 00:20:03,963 32k INFO ====> Epoch: 15
+ 2023-02-09 00:21:21,250 32k INFO ====> Epoch: 16
+ 2023-02-09 00:22:38,564 32k INFO ====> Epoch: 17
+ 2023-02-09 00:23:51,362 32k INFO Train Epoch: 18 [91%]
+ 2023-02-09 00:23:51,362 32k INFO [2.5918359756469727, 2.205848217010498, 7.585756301879883, 19.934139251708984, 0.9796969294548035, 1200, 9.978771236724554e-05]
+ 2023-02-09 00:23:56,283 32k INFO ====> Epoch: 18
+ 2023-02-09 00:25:13,666 32k INFO ====> Epoch: 19
+ 2023-02-09 00:26:30,886 32k INFO ====> Epoch: 20
+ 2023-02-09 00:27:42,775 32k INFO Train Epoch: 21 [90%]
+ 2023-02-09 00:27:42,775 32k INFO [2.3665695190429688, 2.329713821411133, 8.740349769592285, 18.66284942626953, 0.883953332901001, 1400, 9.975029665246193e-05]
+ 2023-02-09 00:27:48,506 32k INFO ====> Epoch: 21
+ 2023-02-09 00:29:05,836 32k INFO ====> Epoch: 22
+ 2023-02-09 00:30:23,087 32k INFO ====> Epoch: 23
+ 2023-02-09 00:31:34,123 32k INFO Train Epoch: 24 [88%]
+ 2023-02-09 00:31:34,123 32k INFO [2.644050121307373, 2.4024436473846436, 3.807854652404785, 10.505730628967285, 0.7721247673034668, 1600, 9.971289496681757e-05]
+ 2023-02-09 00:31:40,666 32k INFO ====> Epoch: 24
+ 2023-02-09 00:32:57,901 32k INFO ====> Epoch: 25
+ 2023-02-09 00:34:15,203 32k INFO ====> Epoch: 26
+ 2023-02-09 00:35:25,363 32k INFO Train Epoch: 27 [87%]
+ 2023-02-09 00:35:25,363 32k INFO [2.5081863403320312, 2.362135648727417, 10.08530044555664, 21.179523468017578, 1.0669621229171753, 1800, 9.967550730505221e-05]
+ 2023-02-09 00:35:32,734 32k INFO ====> Epoch: 27
+ 2023-02-09 00:36:49,992 32k INFO ====> Epoch: 28
+ 2023-02-09 00:38:07,250 32k INFO ====> Epoch: 29
+ 2023-02-09 00:39:16,660 32k INFO Train Epoch: 30 [85%]
+ 2023-02-09 00:39:16,661 32k INFO [2.220442771911621, 2.801152229309082, 7.649630546569824, 14.329034805297852, 0.8535028696060181, 2000, 9.963813366190753e-05]
+ 2023-02-09 00:39:21,154 32k INFO Saving model and optimizer state at iteration 30 to ./logs\32k\G_2000.pth
+ 2023-02-09 00:39:37,843 32k INFO Saving model and optimizer state at iteration 30 to ./logs\32k\D_2000.pth
+ 2023-02-09 00:39:49,595 32k INFO ====> Epoch: 30
+ 2023-02-09 00:41:06,937 32k INFO ====> Epoch: 31
+ 2023-02-09 00:42:24,214 32k INFO ====> Epoch: 32
+ 2023-02-09 00:43:32,732 32k INFO Train Epoch: 33 [84%]
+ 2023-02-09 00:43:32,732 32k INFO [2.5671257972717285, 2.225325107574463, 6.000064849853516, 14.645927429199219, 0.91252601146698, 2200, 9.960077403212722e-05]
+ 2023-02-09 00:43:41,803 32k INFO ====> Epoch: 33
+ 2023-02-09 00:44:59,115 32k INFO ====> Epoch: 34
+ 2023-02-09 00:46:16,581 32k INFO ====> Epoch: 35
+ 2023-02-09 00:47:24,262 32k INFO Train Epoch: 36 [82%]
+ 2023-02-09 00:47:24,262 32k INFO [2.566441059112549, 2.1268556118011475, 7.592073440551758, 18.499162673950195, 0.9556418657302856, 2400, 9.956342841045691e-05]
+ 2023-02-09 00:47:34,260 32k INFO ====> Epoch: 36
+ 2023-02-09 00:48:51,449 32k INFO ====> Epoch: 37
+ 2023-02-09 00:50:08,699 32k INFO ====> Epoch: 38
+ 2023-02-09 00:51:15,505 32k INFO Train Epoch: 39 [81%]
+ 2023-02-09 00:51:15,505 32k INFO [2.479832410812378, 2.2814459800720215, 7.773660182952881, 18.745792388916016, 1.0438538789749146, 2600, 9.952609679164422e-05]
+ 2023-02-09 00:51:26,185 32k INFO ====> Epoch: 39
+ 2023-02-09 00:52:43,425 32k INFO ====> Epoch: 40
+ 2023-02-09 00:54:00,760 32k INFO ====> Epoch: 41
+ 2023-02-09 00:55:06,807 32k INFO Train Epoch: 42 [79%]
+ 2023-02-09 00:55:06,808 32k INFO [2.484856367111206, 2.320491313934326, 10.475366592407227, 19.472793579101562, 1.3102898597717285, 2800, 9.948877917043875e-05]
+ 2023-02-09 00:55:18,363 32k INFO ====> Epoch: 42
+ 2023-02-09 00:56:35,577 32k INFO ====> Epoch: 43
+ 2023-02-09 00:57:52,857 32k INFO ====> Epoch: 44
+ 2023-02-09 00:58:58,009 32k INFO Train Epoch: 45 [78%]
+ 2023-02-09 00:58:58,010 32k INFO [2.4101104736328125, 2.2083635330200195, 11.62160587310791, 18.154218673706055, 0.8082653880119324, 3000, 9.945147554159202e-05]
+ 2023-02-09 00:59:02,398 32k INFO Saving model and optimizer state at iteration 45 to ./logs\32k\G_3000.pth
+ 2023-02-09 00:59:20,855 32k INFO Saving model and optimizer state at iteration 45 to ./logs\32k\D_3000.pth
+ 2023-02-09 00:59:36,859 32k INFO ====> Epoch: 45
+ 2023-02-09 01:00:54,227 32k INFO ====> Epoch: 46
+ 2023-02-09 01:02:11,360 32k INFO ====> Epoch: 47
+ 2023-02-09 01:03:15,636 32k INFO Train Epoch: 48 [76%]
+ 2023-02-09 01:03:15,636 32k INFO [2.5076825618743896, 2.1728971004486084, 8.223286628723145, 14.673565864562988, 0.8116975426673889, 3200, 9.941418589985758e-05]
+ 2023-02-09 01:03:28,785 32k INFO ====> Epoch: 48
+ 2023-02-09 01:04:46,096 32k INFO ====> Epoch: 49
+ 2023-02-09 01:06:03,441 32k INFO ====> Epoch: 50
+ 2023-02-09 01:07:07,086 32k INFO Train Epoch: 51 [75%]
+ 2023-02-09 01:07:07,086 32k INFO [2.6912717819213867, 2.093996047973633, 7.675986289978027, 15.62138557434082, 1.2493226528167725, 3400, 9.937691023999092e-05]
+ 2023-02-09 01:07:21,073 32k INFO ====> Epoch: 51
+ 2023-02-09 01:08:38,316 32k INFO ====> Epoch: 52
+ 2023-02-09 01:09:55,540 32k INFO ====> Epoch: 53
+ 2023-02-09 01:10:58,191 32k INFO Train Epoch: 54 [73%]
+ 2023-02-09 01:10:58,192 32k INFO [2.269256114959717, 2.731293201446533, 8.000404357910156, 14.131468772888184, 1.039566159248352, 3600, 9.933964855674948e-05]
+ 2023-02-09 01:11:13,014 32k INFO ====> Epoch: 54
+ 2023-02-09 01:12:30,251 32k INFO ====> Epoch: 55
+ 2023-02-09 01:13:47,584 32k INFO ====> Epoch: 56
+ 2023-02-09 01:14:49,462 32k INFO Train Epoch: 57 [72%]
+ 2023-02-09 01:14:49,462 32k INFO [2.462212324142456, 2.380282402038574, 9.700272560119629, 19.343175888061523, 1.1376886367797852, 3800, 9.930240084489267e-05]
+ 2023-02-09 01:15:05,201 32k INFO ====> Epoch: 57
+ 2023-02-09 01:16:22,533 32k INFO ====> Epoch: 58
+ 2023-02-09 01:17:39,809 32k INFO ====> Epoch: 59
+ 2023-02-09 01:18:40,881 32k INFO Train Epoch: 60 [70%]
+ 2023-02-09 01:18:40,882 32k INFO [2.4092752933502197, 2.3126306533813477, 11.08122730255127, 19.267513275146484, 0.5496549010276794, 4000, 9.926516709918191e-05]
+ 2023-02-09 01:18:45,348 32k INFO Saving model and optimizer state at iteration 60 to ./logs\32k\G_4000.pth
+ 2023-02-09 01:19:01,949 32k INFO Saving model and optimizer state at iteration 60 to ./logs\32k\D_4000.pth
+ 2023-02-09 01:19:22,256 32k INFO ====> Epoch: 60
+ 2023-02-09 01:20:39,651 32k INFO ====> Epoch: 61
+ 2023-02-09 01:21:57,031 32k INFO ====> Epoch: 62
+ 2023-02-09 01:22:57,338 32k INFO Train Epoch: 63 [69%]
+ 2023-02-09 01:22:57,338 32k INFO [2.407628059387207, 2.4265987873077393, 9.841700553894043, 20.425884246826172, 1.1777186393737793, 4200, 9.922794731438052e-05]
+ 2023-02-09 01:23:14,639 32k INFO ====> Epoch: 63
+ 2023-02-09 01:24:32,051 32k INFO ====> Epoch: 64
+ 2023-02-09 01:25:49,407 32k INFO ====> Epoch: 65
+ 2023-02-09 01:26:48,771 32k INFO Train Epoch: 66 [67%]
+ 2023-02-09 01:26:48,772 32k INFO [2.4743025302886963, 2.436379909515381, 9.201873779296875, 18.20071029663086, 0.933559000492096, 4400, 9.919074148525384e-05]
+ 2023-02-09 01:27:06,967 32k INFO ====> Epoch: 66
+ 2023-02-09 01:28:24,205 32k INFO ====> Epoch: 67
+ 2023-02-09 01:29:41,611 32k INFO ====> Epoch: 68
+ 2023-02-09 01:30:55,639 32k INFO Train Epoch: 69 [66%]
+ 2023-02-09 01:30:55,640 32k INFO [2.476935863494873, 2.297214984893799, 11.784645080566406, 20.394569396972656, 0.9714463353157043, 4600, 9.915354960656915e-05]
+ 2023-02-09 01:31:14,621 32k INFO ====> Epoch: 69
+ 2023-02-09 01:32:31,900 32k INFO ====> Epoch: 70
+ 2023-02-09 01:33:49,165 32k INFO ====> Epoch: 71
+ 2023-02-09 01:34:46,907 32k INFO Train Epoch: 72 [64%]
+ 2023-02-09 01:34:46,908 32k INFO [2.5771408081054688, 2.3849239349365234, 10.185483932495117, 17.841482162475586, 0.7261289358139038, 4800, 9.911637167309565e-05]
+ 2023-02-09 01:35:06,816 32k INFO ====> Epoch: 72
+ 2023-02-09 01:36:24,001 32k INFO ====> Epoch: 73
+ 2023-02-09 01:37:41,262 32k INFO ====> Epoch: 74
+ 2023-02-09 01:38:38,250 32k INFO Train Epoch: 75 [63%]
+ 2023-02-09 01:38:38,250 32k INFO [2.465418815612793, 2.232435941696167, 10.514191627502441, 20.439416885375977, 1.0744853019714355, 5000, 9.907920767960457e-05]
+ 2023-02-09 01:38:42,755 32k INFO Saving model and optimizer state at iteration 75 to ./logs\32k\G_5000.pth
+ 2023-02-09 01:38:58,887 32k INFO Saving model and optimizer state at iteration 75 to ./logs\32k\D_5000.pth
+ 2023-02-09 01:39:23,174 32k INFO ====> Epoch: 75
+ 2023-02-09 01:40:40,570 32k INFO ====> Epoch: 76
+ 2023-02-09 01:41:58,063 32k INFO ====> Epoch: 77
+ 2023-02-09 01:42:54,165 32k INFO Train Epoch: 78 [61%]
+ 2023-02-09 01:42:54,165 32k INFO [2.6826303005218506, 2.095520496368408, 3.6221730709075928, 12.267330169677734, 0.8276277184486389, 5200, 9.904205762086905e-05]
+ 2023-02-09 01:43:15,721 32k INFO ====> Epoch: 78
+ 2023-02-09 01:44:33,155 32k INFO ====> Epoch: 79
+ 2023-02-09 01:45:50,627 32k INFO ====> Epoch: 80
+ 2023-02-09 01:46:45,893 32k INFO Train Epoch: 81 [60%]
+ 2023-02-09 01:46:45,893 32k INFO [2.520723342895508, 2.3147475719451904, 9.123695373535156, 19.63064956665039, 0.5058915019035339, 5400, 9.900492149166423e-05]
+ 2023-02-09 01:47:08,201 32k INFO ====> Epoch: 81
+ 2023-02-09 01:48:25,508 32k INFO ====> Epoch: 82
+ 2023-02-09 01:49:42,705 32k INFO ====> Epoch: 83
+ 2023-02-09 01:50:37,129 32k INFO Train Epoch: 84 [58%]
+ 2023-02-09 01:50:37,129 32k INFO [2.2849955558776855, 2.513012409210205, 12.865352630615234, 19.767900466918945, 0.9678688049316406, 5600, 9.896779928676716e-05]
+ 2023-02-09 01:51:00,273 32k INFO ====> Epoch: 84
+ 2023-02-09 01:52:17,483 32k INFO ====> Epoch: 85
+ 2023-02-09 01:53:34,853 32k INFO ====> Epoch: 86
+ 2023-02-09 01:54:28,632 32k INFO Train Epoch: 87 [57%]
+ 2023-02-09 01:54:28,633 32k INFO [2.230534553527832, 2.553818464279175, 10.746828079223633, 17.61080551147461, 0.6103332042694092, 5800, 9.89306910009569e-05]
+ 2023-02-09 01:54:52,593 32k INFO ====> Epoch: 87
+ 2023-02-09 01:56:09,924 32k INFO ====> Epoch: 88
+ 2023-02-09 01:57:27,133 32k INFO ====> Epoch: 89
+ 2023-02-09 01:58:19,773 32k INFO Train Epoch: 90 [55%]
+ 2023-02-09 01:58:19,773 32k INFO [2.9170384407043457, 1.8074010610580444, 6.252444267272949, 11.759191513061523, 0.5820555090904236, 6000, 9.889359662901445e-05]
+ 2023-02-09 01:58:24,216 32k INFO Saving model and optimizer state at iteration 90 to ./logs\32k\G_6000.pth
+ 2023-02-09 01:58:41,937 32k INFO Saving model and optimizer state at iteration 90 to ./logs\32k\D_6000.pth
+ 2023-02-09 01:59:10,016 32k INFO ====> Epoch: 90
+ 2023-02-09 02:00:27,480 32k INFO ====> Epoch: 91
+ 2023-02-09 02:01:44,927 32k INFO ====> Epoch: 92
+ 2023-02-09 02:02:36,973 32k INFO Train Epoch: 93 [54%]
+ 2023-02-09 02:02:36,974 32k INFO [2.248124837875366, 2.691680908203125, 9.031513214111328, 17.063570022583008, 0.8340508937835693, 6200, 9.885651616572276e-05]
+ 2023-02-09 02:03:02,579 32k INFO ====> Epoch: 93
+ 2023-02-09 02:04:19,880 32k INFO ====> Epoch: 94
+ 2023-02-09 02:05:37,183 32k INFO ====> Epoch: 95
+ 2023-02-09 02:06:28,292 32k INFO Train Epoch: 96 [52%]
+ 2023-02-09 02:06:28,292 32k INFO [2.7041618824005127, 2.0895659923553467, 6.933722019195557, 14.279449462890625, 0.9478716254234314, 6400, 9.881944960586671e-05]
+ 2023-02-09 02:06:54,691 32k INFO ====> Epoch: 96
+ 2023-02-09 02:08:11,826 32k INFO ====> Epoch: 97
+ 2023-02-09 02:09:29,228 32k INFO ====> Epoch: 98
+ 2023-02-09 02:10:19,605 32k INFO Train Epoch: 99 [51%]
+ 2023-02-09 02:10:19,605 32k INFO [2.465763568878174, 2.286102294921875, 8.612666130065918, 17.839487075805664, 0.8176154494285583, 6600, 9.87823969442332e-05]
+ 2023-02-09 02:10:46,982 32k INFO ====> Epoch: 99
+ 2023-02-09 02:12:04,533 32k INFO ====> Epoch: 100
+ 2023-02-09 02:13:21,854 32k INFO ====> Epoch: 101
+ 2023-02-09 02:14:11,250 32k INFO Train Epoch: 102 [49%]
+ 2023-02-09 02:14:11,250 32k INFO [2.325270652770996, 2.503894567489624, 13.427793502807617, 18.58626937866211, 1.1115485429763794, 6800, 9.874535817561101e-05]
+ 2023-02-09 02:14:39,395 32k INFO ====> Epoch: 102
+ 2023-02-09 02:15:56,665 32k INFO ====> Epoch: 103
+ 2023-02-09 02:17:13,783 32k INFO ====> Epoch: 104
+ 2023-02-09 02:18:02,422 32k INFO Train Epoch: 105 [48%]
+ 2023-02-09 02:18:02,422 32k INFO [2.5144426822662354, 2.262126922607422, 9.593130111694336, 19.810073852539062, 0.9265149235725403, 7000, 9.870833329479095e-05]
+ 2023-02-09 02:18:06,802 32k INFO Saving model and optimizer state at iteration 105 to ./logs\32k\G_7000.pth
+ 2023-02-09 02:18:26,842 32k INFO Saving model and optimizer state at iteration 105 to ./logs\32k\D_7000.pth
+ 2023-02-09 02:18:59,625 32k INFO ====> Epoch: 105
+ 2023-02-09 02:20:16,918 32k INFO ====> Epoch: 106
+ 2023-02-09 02:21:34,168 32k INFO ====> Epoch: 107
+ 2023-02-09 02:22:22,051 32k INFO Train Epoch: 108 [46%]
+ 2023-02-09 02:22:22,051 32k INFO [2.5437726974487305, 1.968860387802124, 8.248473167419434, 15.724898338317871, 0.8549317717552185, 7200, 9.867132229656573e-05]
+ 2023-02-09 02:22:51,735 32k INFO ====> Epoch: 108
+ 2023-02-09 02:24:08,913 32k INFO ====> Epoch: 109
+ 2023-02-09 02:25:26,159 32k INFO ====> Epoch: 110
+ 2023-02-09 02:26:13,089 32k INFO Train Epoch: 111 [45%]
+ 2023-02-09 02:26:13,090 32k INFO [2.4943034648895264, 2.1690049171447754, 8.534195899963379, 18.047616958618164, 0.6213272213935852, 7400, 9.863432517573002e-05]
+ 2023-02-09 02:26:43,675 32k INFO ====> Epoch: 111
+ 2023-02-09 02:28:00,862 32k INFO ====> Epoch: 112
+ 2023-02-09 02:29:18,116 32k INFO ====> Epoch: 113
+ 2023-02-09 02:30:04,341 32k INFO Train Epoch: 114 [43%]
+ 2023-02-09 02:30:04,342 32k INFO [2.590273141860962, 2.244678497314453, 8.331713676452637, 17.773584365844727, 0.4851672947406769, 7600, 9.859734192708044e-05]
+ 2023-02-09 02:30:35,745 32k INFO ====> Epoch: 114
+ 2023-02-09 02:31:52,967 32k INFO ====> Epoch: 115
+ 2023-02-09 02:33:10,215 32k INFO ====> Epoch: 116
+ 2023-02-09 02:33:55,596 32k INFO Train Epoch: 117 [42%]
+ 2023-02-09 02:33:55,597 32k INFO [2.43920636177063, 2.299633026123047, 10.326704978942871, 19.797151565551758, 0.7202500104904175, 7800, 9.85603725454156e-05]
+ 2023-02-09 02:34:27,882 32k INFO ====> Epoch: 117
+ 2023-02-09 02:35:45,221 32k INFO ====> Epoch: 118
+ 2023-02-09 02:37:02,373 32k INFO ====> Epoch: 119
+ 2023-02-09 02:37:46,886 32k INFO Train Epoch: 120 [40%]
+ 2023-02-09 02:37:46,887 32k INFO [2.7452502250671387, 1.881577968597412, 6.52833890914917, 14.799930572509766, 0.5330202579498291, 8000, 9.8523417025536e-05]
+ 2023-02-09 02:37:51,352 32k INFO Saving model and optimizer state at iteration 120 to ./logs\32k\G_8000.pth
+ 2023-02-09 02:38:05,481 32k INFO Saving model and optimizer state at iteration 120 to ./logs\32k\D_8000.pth
+ 2023-02-09 02:38:41,822 32k INFO ====> Epoch: 120
+ 2023-02-09 02:39:59,271 32k INFO ====> Epoch: 121
+ 2023-02-09 02:41:16,709 32k INFO ====> Epoch: 122
+ 2023-02-09 02:42:00,437 32k INFO Train Epoch: 123 [39%]
+ 2023-02-09 02:42:00,437 32k INFO [2.5362741947174072, 2.0902092456817627, 8.579750061035156, 16.44462776184082, 0.640954852104187, 8200, 9.848647536224416e-05]
+ 2023-02-09 02:42:34,336 32k INFO ====> Epoch: 123
+ 2023-02-09 02:43:51,542 32k INFO ====> Epoch: 124
+ 2023-02-09 02:45:08,701 32k INFO ====> Epoch: 125
+ 2023-02-09 02:45:51,519 32k INFO Train Epoch: 126 [37%]
+ 2023-02-09 02:45:51,519 32k INFO [2.5620243549346924, 2.1529924869537354, 6.262371063232422, 14.607771873474121, 0.6447804570198059, 8400, 9.84495475503445e-05]
+ 2023-02-09 02:46:26,225 32k INFO ====> Epoch: 126
+ 2023-02-09 02:47:43,549 32k INFO ====> Epoch: 127
+ 2023-02-09 02:49:00,851 32k INFO ====> Epoch: 128
+ 2023-02-09 02:49:43,071 32k INFO Train Epoch: 129 [36%]
+ 2023-02-09 02:49:43,072 32k INFO [2.6855649948120117, 2.093135356903076, 8.865744590759277, 14.999298095703125, 0.4640387296676636, 8600, 9.841263358464336e-05]
+ 2023-02-09 02:50:18,555 32k INFO ====> Epoch: 129
+ 2023-02-09 02:51:35,826 32k INFO ====> Epoch: 130
+ 2023-02-09 02:52:53,221 32k INFO ====> Epoch: 131
+ 2023-02-09 02:53:34,341 32k INFO Train Epoch: 132 [34%]
+ 2023-02-09 02:53:34,342 32k INFO [2.45322322845459, 2.1377835273742676, 7.04033899307251, 14.747097969055176, 0.6762568950653076, 8800, 9.837573345994909e-05]
+ 2023-02-09 02:54:10,739 32k INFO ====> Epoch: 132
+ 2023-02-09 02:55:27,964 32k INFO ====> Epoch: 133
+ 2023-02-09 02:56:45,156 32k INFO ====> Epoch: 134
+ 2023-02-09 02:57:25,569 32k INFO Train Epoch: 135 [33%]
+ 2023-02-09 02:57:25,569 32k INFO [2.516730308532715, 2.3212270736694336, 9.098499298095703, 18.753353118896484, 0.8016514182090759, 9000, 9.833884717107196e-05]
+ 2023-02-09 02:57:30,052 32k INFO Saving model and optimizer state at iteration 135 to ./logs\32k\G_9000.pth
+ 2023-02-09 02:57:49,313 32k INFO Saving model and optimizer state at iteration 135 to ./logs\32k\D_9000.pth
+ 2023-02-09 02:58:30,142 32k INFO ====> Epoch: 135
+ 2023-02-09 02:59:47,578 32k INFO ====> Epoch: 136
+ 2023-02-09 03:01:04,989 32k INFO ====> Epoch: 137
+ 2023-02-09 03:01:44,604 32k INFO Train Epoch: 138 [31%]
+ 2023-02-09 03:01:44,604 32k INFO [2.5661885738372803, 2.1553306579589844, 12.048609733581543, 19.532424926757812, 1.2839446067810059, 9200, 9.830197471282419e-05]
+ 2023-02-09 03:02:22,615 32k INFO ====> Epoch: 138
+ 2023-02-09 03:03:39,975 32k INFO ====> Epoch: 139
+ 2023-02-09 03:04:57,273 32k INFO ====> Epoch: 140
+ 2023-02-09 03:05:35,900 32k INFO Train Epoch: 141 [30%]
+ 2023-02-09 03:05:35,901 32k INFO [2.4171464443206787, 2.1831154823303223, 7.9948649406433105, 16.37220001220703, 0.44416582584381104, 9400, 9.826511608001993e-05]
+ 2023-02-09 03:06:14,772 32k INFO ====> Epoch: 141
+ 2023-02-09 03:07:32,122 32k INFO ====> Epoch: 142
+ 2023-02-09 03:08:49,280 32k INFO ====> Epoch: 143
+ 2023-02-09 03:09:27,103 32k INFO Train Epoch: 144 [28%]
+ 2023-02-09 03:09:27,104 32k INFO [2.356396436691284, 2.559436798095703, 10.186924934387207, 18.508615493774414, 0.9834058284759521, 9600, 9.822827126747529e-05]
+ 2023-02-09 03:10:06,785 32k INFO ====> Epoch: 144
+ 2023-02-09 03:11:24,022 32k INFO ====> Epoch: 145
+ 2023-02-09 03:12:41,266 32k INFO ====> Epoch: 146
+ 2023-02-09 03:13:18,484 32k INFO Train Epoch: 147 [27%]
+ 2023-02-09 03:13:18,485 32k INFO [2.5275373458862305, 2.34236478805542, 11.132747650146484, 20.919368743896484, 0.8620054721832275, 9800, 9.819144027000834e-05]
+ 2023-02-09 03:13:58,998 32k INFO ====> Epoch: 147
+ 2023-02-09 03:15:16,211 32k INFO ====> Epoch: 148
+ 2023-02-09 03:16:33,518 32k INFO ====> Epoch: 149
+ 2023-02-09 03:17:09,655 32k INFO Train Epoch: 150 [25%]
+ 2023-02-09 03:17:09,655 32k INFO [2.490751028060913, 2.18687105178833, 8.969452857971191, 16.82117462158203, 0.8959197998046875, 10000, 9.815462308243906e-05]
+ 2023-02-09 03:17:14,164 32k INFO Saving model and optimizer state at iteration 150 to ./logs\32k\G_10000.pth
+ 2023-02-09 03:17:34,194 32k INFO Saving model and optimizer state at iteration 150 to ./logs\32k\D_10000.pth
+ 2023-02-09 03:18:19,227 32k INFO ====> Epoch: 150
+ 2023-02-09 03:19:36,776 32k INFO ====> Epoch: 151
+ 2023-02-09 03:20:54,200 32k INFO ====> Epoch: 152
+ 2023-02-09 03:21:29,735 32k INFO Train Epoch: 153 [24%]
+ 2023-02-09 03:21:29,735 32k INFO [2.548633575439453, 2.158717393875122, 10.489928245544434, 18.32561683654785, 1.113476276397705, 10200, 9.811781969958938e-05]
+ 2023-02-09 03:22:11,964 32k INFO ====> Epoch: 153
+ 2023-02-09 03:23:29,243 32k INFO ====> Epoch: 154
+ 2023-02-09 03:24:46,521 32k INFO ====> Epoch: 155
+ 2023-02-09 03:25:21,203 32k INFO Train Epoch: 156 [22%]
+ 2023-02-09 03:25:21,204 32k INFO [2.6085586547851562, 2.2196943759918213, 6.367833137512207, 16.077957153320312, 1.0498080253601074, 10400, 9.808103011628319e-05]
+ 2023-02-09 03:26:04,324 32k INFO ====> Epoch: 156
+ 2023-02-09 03:27:21,640 32k INFO ====> Epoch: 157
+ 2023-02-09 03:28:38,959 32k INFO ====> Epoch: 158
+ 2023-02-09 03:29:12,697 32k INFO Train Epoch: 159 [21%]
+ 2023-02-09 03:29:12,697 32k INFO [2.4405250549316406, 2.2709829807281494, 9.20801830291748, 17.569778442382812, 0.8504314422607422, 10600, 9.804425432734629e-05]
+ 2023-02-09 03:29:56,480 32k INFO ====> Epoch: 159
+ 2023-02-09 03:31:13,840 32k INFO ====> Epoch: 160
+ 2023-02-09 03:32:31,264 32k INFO ====> Epoch: 161
+ 2023-02-09 03:33:04,232 32k INFO Train Epoch: 162 [19%]
+ 2023-02-09 03:33:04,232 32k INFO [2.257436752319336, 2.3079981803894043, 7.7498016357421875, 11.477513313293457, 0.9249159693717957, 10800, 9.800749232760646e-05]
+ 2023-02-09 03:33:48,914 32k INFO ====> Epoch: 162
+ 2023-02-09 03:35:06,402 32k INFO ====> Epoch: 163
+ 2023-02-09 03:36:23,790 32k INFO ====> Epoch: 164
+ 2023-02-09 03:36:56,013 32k INFO Train Epoch: 165 [18%]
+ 2023-02-09 03:36:56,014 32k INFO [2.8059158325195312, 2.351126194000244, 6.358356952667236, 17.166919708251953, 0.9102870225906372, 11000, 9.797074411189339e-05]
+ 2023-02-09 03:37:00,483 32k INFO Saving model and optimizer state at iteration 165 to ./logs\32k\G_11000.pth
+ 2023-02-09 03:37:19,672 32k INFO Saving model and optimizer state at iteration 165 to ./logs\32k\D_11000.pth
+ 2023-02-09 03:38:09,015 32k INFO ====> Epoch: 165
+ 2023-02-09 03:39:26,362 32k INFO ====> Epoch: 166
+ 2023-02-09 03:40:43,704 32k INFO ====> Epoch: 167
+ 2023-02-09 03:41:15,021 32k INFO Train Epoch: 168 [16%]
+ 2023-02-09 03:41:15,021 32k INFO [2.475376844406128, 2.1227407455444336, 12.1411771774292, 18.27838897705078, 0.7968288064002991, 11200, 9.79340096750387e-05]
+ 2023-02-09 03:42:01,310 32k INFO ====> Epoch: 168
+ 2023-02-09 03:43:18,712 32k INFO ====> Epoch: 169
+ 2023-02-09 03:44:36,075 32k INFO ====> Epoch: 170
+ 2023-02-09 03:45:06,554 32k INFO Train Epoch: 171 [15%]
+ 2023-02-09 03:45:06,554 32k INFO [2.3927536010742188, 2.2263312339782715, 12.99544620513916, 19.178787231445312, 0.8865859508514404, 11400, 9.789728901187598e-05]
+ 2023-02-09 03:45:53,638 32k INFO ====> Epoch: 171
+ 2023-02-09 03:47:10,832 32k INFO ====> Epoch: 172
+ 2023-02-09 03:48:28,090 32k INFO ====> Epoch: 173
+ 2023-02-09 03:48:57,840 32k INFO Train Epoch: 174 [13%]
+ 2023-02-09 03:48:57,841 32k INFO [2.629887580871582, 1.9182734489440918, 8.588482856750488, 16.633516311645508, 0.8498013019561768, 11600, 9.786058211724074e-05]
+ 2023-02-09 03:49:45,780 32k INFO ====> Epoch: 174
+ 2023-02-09 03:51:03,165 32k INFO ====> Epoch: 175
+ 2023-02-09 03:52:20,366 32k INFO ====> Epoch: 176
+ 2023-02-09 03:52:48,998 32k INFO Train Epoch: 177 [12%]
+ 2023-02-09 03:52:48,999 32k INFO [2.5986101627349854, 2.2137248516082764, 8.166472434997559, 14.432415962219238, 0.9092381596565247, 11800, 9.782388898597041e-05]
+ 2023-02-09 03:53:37,879 32k INFO ====> Epoch: 177
+ 2023-02-09 03:54:55,253 32k INFO ====> Epoch: 178
+ 2023-02-09 03:56:12,342 32k INFO ====> Epoch: 179
+ 2023-02-09 03:56:40,201 32k INFO Train Epoch: 180 [10%]
+ 2023-02-09 03:56:40,202 32k INFO [2.377601146697998, 2.379459857940674, 10.841035842895508, 17.28376007080078, 1.0782395601272583, 12000, 9.778720961290439e-05]
+ 2023-02-09 03:56:44,727 32k INFO Saving model and optimizer state at iteration 180 to ./logs\32k\G_12000.pth
+ 2023-02-09 03:57:02,672 32k INFO Saving model and optimizer state at iteration 180 to ./logs\32k\D_12000.pth
+ 2023-02-09 03:57:55,590 32k INFO ====> Epoch: 180
+ 2023-02-09 03:59:12,901 32k INFO ====> Epoch: 181
+ 2023-02-09 04:00:30,140 32k INFO ====> Epoch: 182
+ 2023-02-09 04:00:57,262 32k INFO Train Epoch: 183 [9%]
+ 2023-02-09 04:00:57,263 32k INFO [2.599541664123535, 2.0492987632751465, 7.433438301086426, 15.581904411315918, 0.7799375653266907, 12200, 9.7750543992884e-05]
+ 2023-02-09 04:01:47,831 32k INFO ====> Epoch: 183
+ 2023-02-09 04:03:05,064 32k INFO ====> Epoch: 184
+ 2023-02-09 04:04:22,336 32k INFO ====> Epoch: 185
+ 2023-02-09 04:04:48,629 32k INFO Train Epoch: 186 [7%]
+ 2023-02-09 04:04:48,629 32k INFO [2.708937406539917, 2.195622682571411, 8.098736763000488, 16.33951187133789, 1.0215028524398804, 12400, 9.771389212075249e-05]
+ 2023-02-09 04:05:39,876 32k INFO ====> Epoch: 186
+ 2023-02-09 04:06:57,132 32k INFO ====> Epoch: 187
+ 2023-02-09 04:08:14,442 32k INFO ====> Epoch: 188
+ 2023-02-09 04:08:39,906 32k INFO Train Epoch: 189 [6%]
+ 2023-02-09 04:08:39,907 32k INFO [2.543393135070801, 2.578526258468628, 9.853076934814453, 19.175247192382812, 0.9220128655433655, 12600, 9.767725399135504e-05]
+ 2023-02-09 04:09:31,978 32k INFO ====> Epoch: 189
+ 2023-02-09 04:10:49,213 32k INFO ====> Epoch: 190
+ 2023-02-09 04:12:06,344 32k INFO ====> Epoch: 191
+ 2023-02-09 04:12:31,054 32k INFO Train Epoch: 192 [4%]
+ 2023-02-09 04:12:31,055 32k INFO [2.5891571044921875, 2.2922167778015137, 9.008637428283691, 15.618788719177246, 0.6253546476364136, 12800, 9.764062959953878e-05]
+ 2023-02-09 04:13:24,094 32k INFO ====> Epoch: 192
+ 2023-02-09 04:14:41,399 32k INFO ====> Epoch: 193
+ 2023-02-09 04:15:58,594 32k INFO ====> Epoch: 194
+ 2023-02-09 04:16:22,307 32k INFO Train Epoch: 195 [3%]
+ 2023-02-09 04:16:22,307 32k INFO [2.571204900741577, 2.3370144367218018, 11.431970596313477, 19.423154830932617, 0.8416497707366943, 13000, 9.760401894015275e-05]
+ 2023-02-09 04:16:26,662 32k INFO Saving model and optimizer state at iteration 195 to ./logs\32k\G_13000.pth
+ 2023-02-09 04:16:45,402 32k INFO Saving model and optimizer state at iteration 195 to ./logs\32k\D_13000.pth
+ 2023-02-09 04:17:42,745 32k INFO ====> Epoch: 195
+ 2023-02-09 04:19:00,170 32k INFO ====> Epoch: 196
+ 2023-02-09 04:20:17,357 32k INFO ====> Epoch: 197
+ 2023-02-09 04:20:40,493 32k INFO Train Epoch: 198 [1%]
+ 2023-02-09 04:20:40,493 32k INFO [2.6889920234680176, 2.216010332107544, 9.279974937438965, 17.859813690185547, 0.6048229932785034, 13200, 9.756742200804793e-05]
+ 2023-02-09 04:21:35,077 32k INFO ====> Epoch: 198
+ 2023-02-09 04:22:52,432 32k INFO ====> Epoch: 199
+ 2023-02-09 04:24:09,676 32k INFO ====> Epoch: 200
+ 2023-02-09 04:24:31,716 32k INFO Train Epoch: 201 [0%]
+ 2023-02-09 04:24:31,716 32k INFO [2.5305514335632324, 2.058469772338867, 7.646169662475586, 16.716352462768555, 1.0848625898361206, 13400, 9.753083879807726e-05]
+ 2023-02-09 04:25:27,141 32k INFO ====> Epoch: 201
+ 2023-02-09 04:26:44,453 32k INFO ====> Epoch: 202
+ 2023-02-09 04:28:01,216 32k INFO Train Epoch: 203 [99%]
+ 2023-02-09 04:28:01,217 32k INFO [2.600423812866211, 2.4881081581115723, 8.454794883728027, 15.183467864990234, 0.6778528690338135, 13600, 9.750645761229709e-05]
+ 2023-02-09 04:28:01,979 32k INFO ====> Epoch: 203
+ 2023-02-09 04:29:19,697 32k INFO ====> Epoch: 204
+ 2023-02-09 04:30:36,964 32k INFO ====> Epoch: 205
+ 2023-02-09 04:31:52,908 32k INFO Train Epoch: 206 [97%]
+ 2023-02-09 04:31:52,908 32k INFO [2.2509708404541016, 2.403043270111084, 12.252212524414062, 18.043561935424805, 0.5615130662918091, 13800, 9.746989726111722e-05]
+ 2023-02-09 04:31:54,581 32k INFO ====> Epoch: 206
+ 2023-02-09 04:33:11,675 32k INFO ====> Epoch: 207
+ 2023-02-09 04:34:28,928 32k INFO ====> Epoch: 208
+ 2023-02-09 04:35:44,062 32k INFO Train Epoch: 209 [96%]
+ 2023-02-09 04:35:44,062 32k INFO [2.617187261581421, 2.0524563789367676, 7.7549052238464355, 16.08784294128418, 0.8999805450439453, 14000, 9.743335061835535e-05]
+ 2023-02-09 04:35:48,541 32k INFO Saving model and optimizer state at iteration 209 to ./logs\32k\G_14000.pth
+ 2023-02-09 04:36:07,364 32k INFO Saving model and optimizer state at iteration 209 to ./logs\32k\D_14000.pth
+ 2023-02-09 04:36:13,209 32k INFO ====> Epoch: 209
+ 2023-02-09 04:37:30,391 32k INFO ====> Epoch: 210
+ 2023-02-09 04:38:48,344 32k INFO ====> Epoch: 211
+ 2023-02-09 04:40:03,265 32k INFO Train Epoch: 212 [94%]
+ 2023-02-09 04:40:03,266 32k INFO [2.545370578765869, 2.182567596435547, 11.445052146911621, 18.725894927978516, 1.0447421073913574, 14200, 9.739681767887146e-05]
+ 2023-02-09 04:40:06,510 32k INFO ====> Epoch: 212
+ 2023-02-09 04:41:23,719 32k INFO ====> Epoch: 213
+ 2023-02-09 04:42:40,924 32k INFO ====> Epoch: 214
+ 2023-02-09 04:43:54,429 32k INFO Train Epoch: 215 [93%]
+ 2023-02-09 04:43:54,429 32k INFO [2.492203950881958, 2.0672144889831543, 7.254634380340576, 17.217519760131836, 1.2190320491790771, 14400, 9.736029843752747e-05]
+ 2023-02-09 04:43:58,496 32k INFO ====> Epoch: 215
+ 2023-02-09 04:45:16,448 32k INFO ====> Epoch: 216
+ 2023-02-09 04:46:33,534 32k INFO ====> Epoch: 217
+ 2023-02-09 04:47:46,895 32k INFO Train Epoch: 218 [91%]
+ 2023-02-09 04:47:46,895 32k INFO [2.3578548431396484, 2.3237144947052, 9.533674240112305, 16.62578773498535, 0.6960291862487793, 14600, 9.732379288918723e-05]
+ 2023-02-09 04:47:51,775 32k INFO ====> Epoch: 218
+ 2023-02-09 04:49:09,870 32k INFO ====> Epoch: 219
+ 2023-02-09 04:50:27,058 32k INFO ====> Epoch: 220
+ 2023-02-09 04:51:38,829 32k INFO Train Epoch: 221 [90%]
+ 2023-02-09 04:51:38,829 32k INFO [2.4930238723754883, 2.0849337577819824, 10.19448471069336, 19.00885772705078, 0.8905020952224731, 14800, 9.728730102871649e-05]
+ 2023-02-09 04:51:44,567 32k INFO ====> Epoch: 221
+ 2023-02-09 04:53:01,748 32k INFO ====> Epoch: 222
+ 2023-02-09 04:54:18,922 32k INFO ====> Epoch: 223
+ 2023-02-09 04:55:29,903 32k INFO Train Epoch: 224 [88%]
+ 2023-02-09 04:55:29,903 32k INFO [2.5622823238372803, 2.385986566543579, 8.60080623626709, 15.33399486541748, 0.5517116189002991, 15000, 9.725082285098293e-05]
+ 2023-02-09 04:55:34,236 32k INFO Saving model and optimizer state at iteration 224 to ./logs\32k\G_15000.pth
+ 2023-02-09 04:55:51,622 32k INFO Saving model and optimizer state at iteration 224 to ./logs\32k\D_15000.pth
+ 2023-02-09 04:56:01,949 32k INFO ====> Epoch: 224
+ 2023-02-09 04:57:20,182 32k INFO ====> Epoch: 225
+ 2023-02-09 04:58:38,052 32k INFO ====> Epoch: 226
+ 2023-02-09 04:59:48,934 32k INFO Train Epoch: 227 [87%]
+ 2023-02-09 04:59:48,934 32k INFO [2.699136972427368, 2.1048390865325928, 7.052115440368652, 13.004521369934082, 0.8088562488555908, 15200, 9.721435835085619e-05]
+ 2023-02-09 04:59:56,318 32k INFO ====> Epoch: 227
+ 2023-02-09 05:01:14,332 32k INFO ====> Epoch: 228
+ 2023-02-09 05:02:32,242 32k INFO ====> Epoch: 229
+ 2023-02-09 05:03:41,538 32k INFO Train Epoch: 230 [85%]
+ 2023-02-09 05:03:41,538 32k INFO [2.5119049549102783, 2.255728244781494, 10.959632873535156, 17.58148765563965, 0.4408050775527954, 15400, 9.717790752320778e-05]
+ 2023-02-09 05:03:49,736 32k INFO ====> Epoch: 230
+ 2023-02-09 05:05:07,018 32k INFO ====> Epoch: 231
+ 2023-02-09 05:06:24,410 32k INFO ====> Epoch: 232
+ 2023-02-09 05:07:32,817 32k INFO Train Epoch: 233 [84%]
+ 2023-02-09 05:07:32,817 32k INFO [2.4837000370025635, 2.74263858795166, 8.370596885681152, 18.025814056396484, 0.5485965609550476, 15600, 9.714147036291117e-05]
+ 2023-02-09 05:07:41,860 32k INFO ====> Epoch: 233
+ 2023-02-09 05:08:59,157 32k INFO ====> Epoch: 234
+ 2023-02-09 05:10:16,236 32k INFO ====> Epoch: 235
+ 2023-02-09 05:11:23,813 32k INFO Train Epoch: 236 [82%]
+ 2023-02-09 05:11:23,814 32k INFO [2.615351915359497, 2.0890252590179443, 7.356450080871582, 16.96343994140625, 0.756279706954956, 15800, 9.710504686484176e-05]
+ 2023-02-09 05:11:33,681 32k INFO ====> Epoch: 236
+ 2023-02-09 05:12:51,784 32k INFO ====> Epoch: 237
+ 2023-02-09 05:14:09,960 32k INFO ====> Epoch: 238
+ 2023-02-09 05:15:16,775 32k INFO Train Epoch: 239 [81%]
+ 2023-02-09 05:15:16,775 32k INFO [2.424504518508911, 2.7784013748168945, 9.054712295532227, 16.200336456298828, 0.7321522831916809, 16000, 9.706863702387684e-05]
+ 2023-02-09 05:15:21,217 32k INFO Saving model and optimizer state at iteration 239 to ./logs\32k\G_16000.pth
+ 2023-02-09 05:15:38,638 32k INFO Saving model and optimizer state at iteration 239 to ./logs\32k\D_16000.pth
+ 2023-02-09 05:15:52,955 32k INFO ====> Epoch: 239
+ 2023-02-09 05:17:11,161 32k INFO ====> Epoch: 240
+ 2023-02-09 05:18:29,322 32k INFO ====> Epoch: 241
+ 2023-02-09 05:19:35,475 32k INFO Train Epoch: 242 [79%]
+ 2023-02-09 05:19:35,476 32k INFO [2.4205849170684814, 2.218247413635254, 9.974660873413086, 17.676557540893555, 0.6420414447784424, 16200, 9.703224083489565e-05]
+ 2023-02-09 05:19:47,005 32k INFO ====> Epoch: 242
+ 2023-02-09 05:21:04,968 32k INFO ====> Epoch: 243
+ 2023-02-09 05:22:22,082 32k INFO ====> Epoch: 244
+ 2023-02-09 05:23:27,331 32k INFO Train Epoch: 245 [78%]
+ 2023-02-09 05:23:27,332 32k INFO [2.514897584915161, 2.4238204956054688, 9.19957447052002, 17.169466018676758, 0.6433432698249817, 16400, 9.699585829277933e-05]
+ 2023-02-09 05:23:39,660 32k INFO ====> Epoch: 245
+ 2023-02-09 05:24:56,840 32k INFO ====> Epoch: 246
+ 2023-02-09 05:26:14,893 32k INFO ====> Epoch: 247
+ 2023-02-09 05:27:19,222 32k INFO Train Epoch: 248 [76%]
+ 2023-02-09 05:27:19,223 32k INFO [2.485236406326294, 2.0981593132019043, 8.128880500793457, 17.695823669433594, 0.830685555934906, 16600, 9.695948939241093e-05]
+ 2023-02-09 05:27:32,499 32k INFO ====> Epoch: 248
+ 2023-02-09 05:28:49,748 32k INFO ====> Epoch: 249
+ 2023-02-09 05:30:07,019 32k INFO ====> Epoch: 250
+ 2023-02-09 05:31:10,400 32k INFO Train Epoch: 251 [75%]
+ 2023-02-09 05:31:10,400 32k INFO [2.519704580307007, 2.247236490249634, 8.27247428894043, 16.95021629333496, 0.7045202255249023, 16800, 9.692313412867544e-05]
+ 2023-02-09 05:31:24,372 32k INFO ====> Epoch: 251
+ 2023-02-09 05:32:41,518 32k INFO ====> Epoch: 252
+ 2023-02-09 05:33:58,597 32k INFO ====> Epoch: 253
+ 2023-02-09 05:35:01,233 32k INFO Train Epoch: 254 [73%]
+ 2023-02-09 05:35:01,233 32k INFO [2.723464012145996, 2.004565477371216, 8.685633659362793, 17.136409759521484, 0.7529882788658142, 17000, 9.68867924964598e-05]
+ 2023-02-09 05:35:05,562 32k INFO Saving model and optimizer state at iteration 254 to ./logs\32k\G_17000.pth
+ 2023-02-09 05:35:21,651 32k INFO Saving model and optimizer state at iteration 254 to ./logs\32k\D_17000.pth
+ 2023-02-09 05:35:40,285 32k INFO ====> Epoch: 254
+ 2023-02-09 05:36:58,360 32k INFO ====> Epoch: 255
+ 2023-02-09 05:38:16,245 32k INFO ====> Epoch: 256
+ 2023-02-09 05:39:18,827 32k INFO Train Epoch: 257 [72%]
+ 2023-02-09 05:39:18,828 32k INFO [2.5846078395843506, 2.0677099227905273, 7.601481914520264, 16.82730484008789, 0.46195539832115173, 17200, 9.685046449065278e-05]
+ 2023-02-09 05:39:34,463 32k INFO ====> Epoch: 257
+ 2023-02-09 05:40:52,484 32k INFO ====> Epoch: 258
+ 2023-02-09 05:42:09,468 32k INFO ====> Epoch: 259
+ 2023-02-09 05:43:10,293 32k INFO Train Epoch: 260 [70%]
+ 2023-02-09 05:43:10,294 32k INFO [2.612074375152588, 2.061795234680176, 9.713607788085938, 17.434011459350586, 1.13643217086792, 17400, 9.681415010614512e-05]
+ 2023-02-09 05:43:26,783 32k INFO ====> Epoch: 260
+ 2023-02-09 05:44:43,852 32k INFO ====> Epoch: 261
+ 2023-02-09 05:46:00,940 32k INFO ====> Epoch: 262
+ 2023-02-09 05:47:00,945 32k INFO Train Epoch: 263 [69%]
+ 2023-02-09 05:47:00,945 32k INFO [2.5661118030548096, 2.090351104736328, 10.91147232055664, 18.185489654541016, 0.6497505903244019, 17600, 9.67778493378295e-05]
+ 2023-02-09 05:47:18,327 32k INFO ====> Epoch: 263
+ 2023-02-09 05:48:35,339 32k INFO ====> Epoch: 264
+ 2023-02-09 05:49:52,519 32k INFO ====> Epoch: 265
+ 2023-02-09 05:50:51,655 32k INFO Train Epoch: 266 [67%]
+ 2023-02-09 05:50:51,656 32k INFO [2.441783905029297, 2.2755415439605713, 8.574409484863281, 17.144365310668945, 1.0087649822235107, 17800, 9.674156218060047e-05]
+ 2023-02-09 05:51:09,757 32k INFO ====> Epoch: 266
+ 2023-02-09 05:52:26,654 32k INFO ====> Epoch: 267
+ 2023-02-09 05:53:43,656 32k INFO ====> Epoch: 268
+ 2023-02-09 05:54:42,178 32k INFO Train Epoch: 269 [66%]
+ 2023-02-09 05:54:42,178 32k INFO [2.4041616916656494, 2.2542026042938232, 10.453770637512207, 16.896394729614258, 0.874467670917511, 18000, 9.670528862935451e-05]
+ 2023-02-09 05:54:46,567 32k INFO Saving model and optimizer state at iteration 269 to ./logs\32k\G_18000.pth
+ 2023-02-09 05:55:05,522 32k INFO Saving model and optimizer state at iteration 269 to ./logs\32k\D_18000.pth
+ 2023-02-09 05:55:28,321 32k INFO ====> Epoch: 269
+ 2023-02-09 05:56:46,319 32k INFO ====> Epoch: 270
+ 2023-02-09 05:58:04,206 32k INFO ====> Epoch: 271
+ 2023-02-09 05:59:02,672 32k INFO Train Epoch: 272 [64%]
+ 2023-02-09 05:59:02,672 32k INFO [2.449748992919922, 2.5339150428771973, 10.715926170349121, 18.00714683532715, 0.7727646231651306, 18200, 9.666902867899003e-05]
+ 2023-02-09 05:59:22,434 32k INFO ====> Epoch: 272
+ 2023-02-09 06:00:40,362 32k INFO ====> Epoch: 273
+ 2023-02-09 06:01:58,348 32k INFO ====> Epoch: 274
+ 2023-02-09 06:02:55,204 32k INFO Train Epoch: 275 [63%]
+ 2023-02-09 06:02:55,204 32k INFO [2.5664258003234863, 2.2539126873016357, 8.40420913696289, 19.330455780029297, 1.2545685768127441, 18400, 9.663278232440732e-05]
+ 2023-02-09 06:03:15,780 32k INFO ====> Epoch: 275
+ 2023-02-09 06:04:33,087 32k INFO ====> Epoch: 276
+ 2023-02-09 06:05:50,254 32k INFO ====> Epoch: 277
+ 2023-02-09 06:06:46,232 32k INFO Train Epoch: 278 [61%]
+ 2023-02-09 06:06:46,233 32k INFO [2.5849533081054688, 2.1905155181884766, 7.024458885192871, 16.025548934936523, 0.6339879035949707, 18600, 9.659654956050859e-05]
+ 2023-02-09 06:07:07,698 32k INFO ====> Epoch: 278
+ 2023-02-09 06:08:24,695 32k INFO ====> Epoch: 279
+ 2023-02-09 06:09:41,686 32k INFO ====> Epoch: 280
+ 2023-02-09 06:10:36,725 32k INFO Train Epoch: 281 [60%]
+ 2023-02-09 06:10:36,725 32k INFO [2.621020793914795, 1.9545187950134277, 8.0020112991333, 14.753083229064941, 0.5816468000411987, 18800, 9.656033038219798e-05]
+ 2023-02-09 06:10:58,970 32k INFO ====> Epoch: 281
+ 2023-02-09 06:12:16,092 32k INFO ====> Epoch: 282
+ 2023-02-09 06:13:34,226 32k INFO ====> Epoch: 283
+ 2023-02-09 06:14:29,367 32k INFO Train Epoch: 284 [58%]
+ 2023-02-09 06:14:29,367 32k INFO [2.5978457927703857, 2.0676093101501465, 10.083847045898438, 15.90844440460205, 0.649118959903717, 19000, 9.652412478438153e-05]
+ 2023-02-09 06:14:34,626 32k INFO Saving model and optimizer state at iteration 284 to ./logs\32k\G_19000.pth
+ 2023-02-09 06:14:53,784 32k INFO Saving model and optimizer state at iteration 284 to ./logs\32k\D_19000.pth
+ 2023-02-09 06:15:20,487 32k INFO ====> Epoch: 284
+ 2023-02-09 06:16:38,462 32k INFO ====> Epoch: 285
+ 2023-02-09 06:17:56,373 32k INFO ====> Epoch: 286
+ 2023-02-09 06:18:49,853 32k INFO Train Epoch: 287 [57%]
+ 2023-02-09 06:18:49,853 32k INFO [2.6181414127349854, 2.086658477783203, 7.769612789154053, 13.018095970153809, 0.7441149950027466, 19200, 9.64879327619672e-05]
+ 2023-02-09 06:19:13,818 32k INFO ====> Epoch: 287
+ 2023-02-09 06:20:30,767 32k INFO ====> Epoch: 288
+ 2023-02-09 06:21:47,755 32k INFO ====> Epoch: 289
+ 2023-02-09 06:22:41,198 32k INFO Train Epoch: 290 [55%]
+ 2023-02-09 06:22:41,198 32k INFO [2.5569255352020264, 2.232271671295166, 9.287805557250977, 17.64458656311035, 0.9140328168869019, 19400, 9.645175430986486e-05]
+ 2023-02-09 06:23:05,925 32k INFO ====> Epoch: 290
+ 2023-02-09 06:24:23,877 32k INFO ====> Epoch: 291
+ 2023-02-09 06:25:41,896 32k INFO ====> Epoch: 292
+ 2023-02-09 06:26:34,600 32k INFO Train Epoch: 293 [54%]
+ 2023-02-09 06:26:34,600 32k INFO [2.3312571048736572, 2.471904993057251, 8.659703254699707, 14.936263084411621, 0.5374557971954346, 19600, 9.641558942298625e-05]
+ 2023-02-09 06:27:00,329 32k INFO ====> Epoch: 293
+ 2023-02-09 06:28:17,379 32k INFO ====> Epoch: 294
+ 2023-02-09 06:29:34,319 32k INFO ====> Epoch: 295
+ 2023-02-09 06:30:25,273 32k INFO Train Epoch: 296 [52%]
+ 2023-02-09 06:30:25,273 32k INFO [2.580073118209839, 2.0791285037994385, 6.087092876434326, 15.868849754333496, 0.6256889700889587, 19800, 9.637943809624507e-05]
+ 2023-02-09 06:30:51,672 32k INFO ====> Epoch: 296
+ 2023-02-09 06:32:08,760 32k INFO ====> Epoch: 297
+ 2023-02-09 06:33:25,874 32k INFO ====> Epoch: 298
+ 2023-02-09 06:34:16,143 32k INFO Train Epoch: 299 [51%]
+ 2023-02-09 06:34:16,143 32k INFO [2.7276787757873535, 2.1408498287200928, 7.651435852050781, 13.118131637573242, 0.8090221881866455, 20000, 9.634330032455689e-05]
+ 2023-02-09 06:34:20,490 32k INFO Saving model and optimizer state at iteration 299 to ./logs\32k\G_20000.pth
+ 2023-02-09 06:34:38,151 32k INFO Saving model and optimizer state at iteration 299 to ./logs\32k\D_20000.pth
+ 2023-02-09 06:35:09,071 32k INFO ====> Epoch: 299
+ 2023-02-09 06:36:26,919 32k INFO ====> Epoch: 300
+ 2023-02-09 06:37:44,715 32k INFO ====> Epoch: 301
+ 2023-02-09 06:38:34,897 32k INFO Train Epoch: 302 [49%]
+ 2023-02-09 06:38:34,897 32k INFO [2.3907358646392822, 2.586805820465088, 12.788217544555664, 17.23185157775879, 0.9444102048873901, 20200, 9.63071761028392e-05]
+ 2023-02-09 06:39:02,960 32k INFO ====> Epoch: 302
+ 2023-02-09 06:40:19,965 32k INFO ====> Epoch: 303
+ 2023-02-09 06:41:36,910 32k INFO ====> Epoch: 304
+ 2023-02-09 06:42:25,288 32k INFO Train Epoch: 305 [48%]
+ 2023-02-09 06:42:25,288 32k INFO [2.543513059616089, 2.2357592582702637, 11.66889476776123, 17.664663314819336, 0.9346078038215637, 20400, 9.627106542601141e-05]
+ 2023-02-09 06:42:54,185 32k INFO ====> Epoch: 305
+ 2023-02-09 06:44:11,243 32k INFO ====> Epoch: 306
+ 2023-02-09 06:45:28,529 32k INFO ====> Epoch: 307
+ 2023-02-09 06:46:16,141 32k INFO Train Epoch: 308 [46%]
+ 2023-02-09 06:46:16,142 32k INFO [2.2425007820129395, 2.718269109725952, 10.438041687011719, 14.429862022399902, 0.450414776802063, 20600, 9.62349682889948e-05]
+ 2023-02-09 06:46:45,885 32k INFO ====> Epoch: 308
+ 2023-02-09 06:48:02,967 32k INFO ====> Epoch: 309
+ 2023-02-09 06:49:20,096 32k INFO ====> Epoch: 310
+ 2023-02-09 06:50:06,904 32k INFO Train Epoch: 311 [45%]
+ 2023-02-09 06:50:06,904 32k INFO [2.511903762817383, 2.2772610187530518, 7.602670192718506, 16.86819839477539, 0.4354688823223114, 20800, 9.619888468671259e-05]
+ 2023-02-09 06:50:37,485 32k INFO ====> Epoch: 311
+ 2023-02-09 06:51:54,650 32k INFO ====> Epoch: 312
+ 2023-02-09 06:53:11,706 32k INFO ====> Epoch: 313
+ 2023-02-09 06:53:57,594 32k INFO Train Epoch: 314 [43%]
+ 2023-02-09 06:53:57,595 32k INFO [2.548445463180542, 2.205153226852417, 9.582110404968262, 17.887943267822266, 0.8334051966667175, 21000, 9.61628146140899e-05]
+ 2023-02-09 06:54:01,984 32k INFO Saving model and optimizer state at iteration 314 to ./logs\32k\G_21000.pth
+ 2023-02-09 06:54:19,329 32k INFO Saving model and optimizer state at iteration 314 to ./logs\32k\D_21000.pth
+ 2023-02-09 06:54:54,644 32k INFO ====> Epoch: 314
+ 2023-02-09 06:56:12,706 32k INFO ====> Epoch: 315
+ 2023-02-09 06:57:30,625 32k INFO ====> Epoch: 316
+ 2023-02-09 06:58:16,770 32k INFO Train Epoch: 317 [42%]
+ 2023-02-09 06:58:16,770 32k INFO [2.5582966804504395, 2.0421464443206787, 9.006149291992188, 15.637856483459473, 1.0011146068572998, 21200, 9.612675806605373e-05]
+ 2023-02-09 06:58:48,980 32k INFO ====> Epoch: 317
+ 2023-02-09 07:00:06,236 32k INFO ====> Epoch: 318
+ 2023-02-09 07:01:23,359 32k INFO ====> Epoch: 319
+ 2023-02-09 07:02:08,658 32k INFO Train Epoch: 320 [40%]
+ 2023-02-09 07:02:08,658 32k INFO [2.39801287651062, 2.3436264991760254, 10.229707717895508, 19.279922485351562, 0.727281391620636, 21400, 9.609071503753299e-05]
+ 2023-02-09 07:02:41,674 32k INFO ====> Epoch: 320
+ 2023-02-09 07:03:59,633 32k INFO ====> Epoch: 321
+ 2023-02-09 07:05:17,720 32k INFO ====> Epoch: 322
+ 2023-02-09 07:06:01,216 32k INFO Train Epoch: 323 [39%]
+ 2023-02-09 07:06:01,216 32k INFO [2.6565518379211426, 2.263072967529297, 6.5126566886901855, 15.140740394592285, 0.8894218802452087, 21600, 9.60546855234585e-05]
+ 2023-02-09 07:06:35,104 32k INFO ====> Epoch: 323
+ 2023-02-09 07:07:52,224 32k INFO ====> Epoch: 324
+ 2023-02-09 07:09:09,258 32k INFO ====> Epoch: 325
+ 2023-02-09 07:09:51,895 32k INFO Train Epoch: 326 [37%]
+ 2023-02-09 07:09:51,896 32k INFO [2.869607448577881, 1.8302068710327148, 5.35368013381958, 10.451361656188965, 0.7921611070632935, 21800, 9.601866951876297e-05]
+ 2023-02-09 07:10:26,577 32k INFO ====> Epoch: 326
+ 2023-02-09 07:11:43,674 32k INFO ====> Epoch: 327
+ 2023-02-09 07:13:00,883 32k INFO ====> Epoch: 328
+ 2023-02-09 07:13:42,678 32k INFO Train Epoch: 329 [36%]
+ 2023-02-09 07:13:42,678 32k INFO [2.542959451675415, 2.098376512527466, 9.891724586486816, 15.331761360168457, 0.7128269076347351, 22000, 9.5982667018381e-05]
+ 2023-02-09 07:13:47,172 32k INFO Saving model and optimizer state at iteration 329 to ./logs\32k\G_22000.pth
+ 2023-02-09 07:14:04,902 32k INFO Saving model and optimizer state at iteration 329 to ./logs\32k\D_22000.pth
+ 2023-02-09 07:14:44,184 32k INFO ====> Epoch: 329
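The bracketed list on each log line after a "Train Epoch" line holds the scalars the trainer prints every log_interval (200) steps. In VITS-style training loops that order is discriminator loss, generator loss, feature-matching loss, mel loss and KL loss, followed by the global step and current learning rate; treat that ordering as an inference from the surrounding code, not something the log itself states. A sketch that pulls step vs. mel loss out of the log with only the standard library:

import ast
import re

STEP_RE = re.compile(r"INFO \[(.+)\]$")

points = []
with open("32k-resona/train.log", "r", encoding="utf-8") as f:
    for line in f:
        m = STEP_RE.search(line.strip())
        if not m:
            continue  # skip config dumps, checkpoint saves, epoch markers
        vals = ast.literal_eval("[" + m.group(1) + "]")
        # assumed order: disc, gen, fm, mel, kl, global_step, learning_rate
        points.append((int(vals[5]), vals[3]))

for step, mel_loss in points[:5]:
    print(step, mel_loss)

Reading the extracted pairs, the mel term falls from about 45.5 at step 0 to the mid-teens by step 22000, which matches the gradual convergence the checkpoints in this commit were sampled from.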