{
"name": "root",
"gauges": {
"SoccerTwos.Policy.Entropy.mean": {
"value": 2.2144265174865723,
"min": 2.183969736099243,
"max": 3.2108168601989746,
"count": 375
},
"SoccerTwos.Policy.Entropy.sum": {
"value": 45351.45703125,
"min": 23227.78125,
"max": 116587.7890625,
"count": 375
},
"SoccerTwos.Environment.EpisodeLength.mean": {
"value": 47.73529411764706,
"min": 43.75675675675676,
"max": 999.0,
"count": 375
},
"SoccerTwos.Environment.EpisodeLength.sum": {
"value": 19476.0,
"min": 11988.0,
"max": 30568.0,
"count": 375
},
"SoccerTwos.Self-play.ELO.mean": {
"value": 1490.867701880833,
"min": 1193.7116257247524,
"max": 1504.8553986004536,
"count": 368
},
"SoccerTwos.Self-play.ELO.sum": {
"value": 304137.01118368993,
"min": 2388.396315779494,
"max": 331093.3125768256,
"count": 368
},
"SoccerTwos.Step.mean": {
"value": 4999998.0,
"min": 1249893.0,
"max": 4999998.0,
"count": 376
},
"SoccerTwos.Step.sum": {
"value": 4999998.0,
"min": 1249893.0,
"max": 4999998.0,
"count": 376
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.mean": {
"value": 0.00677886325865984,
"min": -0.05910105258226395,
"max": 0.15611746907234192,
"count": 376
},
"SoccerTwos.Policy.ExtrinsicBaselineEstimate.sum": {
"value": 1.3828880786895752,
"min": -10.460886001586914,
"max": 29.03784942626953,
"count": 376
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.mean": {
"value": -0.001218882272951305,
"min": -0.06675509363412857,
"max": 0.15357138216495514,
"count": 376
},
"SoccerTwos.Policy.ExtrinsicValueEstimate.sum": {
"value": -0.24865198135375977,
"min": -11.815650939941406,
"max": 28.56427764892578,
"count": 376
},
"SoccerTwos.Environment.CumulativeReward.mean": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 376
},
"SoccerTwos.Environment.CumulativeReward.sum": {
"value": 0.0,
"min": 0.0,
"max": 0.0,
"count": 376
},
"SoccerTwos.Policy.ExtrinsicReward.mean": {
"value": 0.0748235285282135,
"min": -0.6666666666666666,
"max": 0.4263526756344861,
"count": 376
},
"SoccerTwos.Policy.ExtrinsicReward.sum": {
"value": 15.263999819755554,
"min": -33.54920029640198,
"max": 61.7603999376297,
"count": 376
},
"SoccerTwos.Environment.GroupCumulativeReward.mean": {
"value": 0.0748235285282135,
"min": -0.6666666666666666,
"max": 0.4263526756344861,
"count": 376
},
"SoccerTwos.Environment.GroupCumulativeReward.sum": {
"value": 15.263999819755554,
"min": -33.54920029640198,
"max": 61.7603999376297,
"count": 376
},
"SoccerTwos.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 376
},
"SoccerTwos.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 376
},
"SoccerTwos.Losses.PolicyLoss.mean": {
"value": 0.01930922961910255,
"min": 0.01153092454187572,
"max": 0.025863834377378225,
"count": 178
},
"SoccerTwos.Losses.PolicyLoss.sum": {
"value": 0.01930922961910255,
"min": 0.01153092454187572,
"max": 0.025863834377378225,
"count": 178
},
"SoccerTwos.Losses.ValueLoss.mean": {
"value": 0.10401499246557554,
"min": 0.00012666295685145694,
"max": 0.11355828319986662,
"count": 178
},
"SoccerTwos.Losses.ValueLoss.sum": {
"value": 0.10401499246557554,
"min": 0.00012666295685145694,
"max": 0.11355828319986662,
"count": 178
},
"SoccerTwos.Losses.BaselineLoss.mean": {
"value": 0.10656433428327243,
"min": 0.0001420305879340352,
"max": 0.11552603046099345,
"count": 178
},
"SoccerTwos.Losses.BaselineLoss.sum": {
"value": 0.10656433428327243,
"min": 0.0001420305879340352,
"max": 0.11552603046099345,
"count": 178
},
"SoccerTwos.Policy.LearningRate.mean": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 178
},
"SoccerTwos.Policy.LearningRate.sum": {
"value": 0.0003,
"min": 0.0003,
"max": 0.0003,
"count": 178
},
"SoccerTwos.Policy.Epsilon.mean": {
"value": 0.20000000000000007,
"min": 0.19999999999999996,
"max": 0.20000000000000007,
"count": 178
},
"SoccerTwos.Policy.Epsilon.sum": {
"value": 0.20000000000000007,
"min": 0.19999999999999996,
"max": 0.20000000000000007,
"count": 178
},
"SoccerTwos.Policy.Beta.mean": {
"value": 0.005000000000000001,
"min": 0.005,
"max": 0.005000000000000001,
"count": 178
},
"SoccerTwos.Policy.Beta.sum": {
"value": 0.005000000000000001,
"min": 0.005,
"max": 0.005000000000000001,
"count": 178
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1729471278",
"python_version": "3.10.11 (tags/v3.10.11:7d4cc5a, Apr 5 2023, 00:38:17) [MSC v.1929 64 bit (AMD64)]",
"command_line_arguments": "\\\\?\\C:\\Users\\HP\\Documents\\unsoed_perkuliahan\\semester_7\\skripsi\\.venv\\Scripts\\mlagents-learn .\\config\\poca\\SoccerTwos.yaml --env=../SoccerTwos\\SoccerTwos.exe --run-id=SoccerTwos --no-graphics --resume",
"mlagents_version": "1.2.0.dev0",
"mlagents_envs_version": "1.2.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "2.5.0+cpu",
"numpy_version": "1.23.5",
"end_time_seconds": "1729481081"
},
"total": 9803.589370999951,
"count": 1,
"self": 1.0312823000131175,
"children": {
"run_training.setup": {
"total": 0.11100159992929548,
"count": 1,
"self": 0.11100159992929548
},
"TrainerController.start_learning": {
"total": 9802.447087100009,
"count": 1,
"self": 6.596104706870392,
"children": {
"TrainerController._reset_env": {
"total": 6.700602599885315,
"count": 20,
"self": 6.700602599885315
},
"TrainerController.advance": {
"total": 9788.985957093304,
"count": 250261,
"self": 5.873614368727431,
"children": {
"env_step": {
"total": 4494.554129914497,
"count": 250261,
"self": 3503.5780742662027,
"children": {
"SubprocessEnvManager._take_step": {
"total": 986.5883523586672,
"count": 250261,
"self": 34.71994428872131,
"children": {
"TorchPolicy.evaluate": {
"total": 951.8684080699459,
"count": 478040,
"self": 951.8684080699459
}
}
},
"workers": {
"total": 4.3877032896270975,
"count": 250261,
"self": 0.0,
"children": {
"worker_root": {
"total": 9788.83133592701,
"count": 250261,
"is_parallel": true,
"self": 7056.923214805662,
"children": {
"steps_from_proto": {
"total": 0.04099759995006025,
"count": 40,
"is_parallel": true,
"self": 0.008031599107198417,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.03296600084286183,
"count": 160,
"is_parallel": true,
"self": 0.03296600084286183
}
}
},
"UnityEnvironment.step": {
"total": 2731.8671235213988,
"count": 250261,
"is_parallel": true,
"self": 164.26025081053376,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 139.1125939968042,
"count": 250261,
"is_parallel": true,
"self": 139.1125939968042
},
"communicator.exchange": {
"total": 1925.3900607132819,
"count": 250261,
"is_parallel": true,
"self": 1925.3900607132819
},
"steps_from_proto": {
"total": 503.1042180007789,
"count": 500522,
"is_parallel": true,
"self": 91.37747572863009,
"children": {
"_process_rank_one_or_two_observation": {
"total": 411.72674227214884,
"count": 2002088,
"is_parallel": true,
"self": 411.72674227214884
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 5288.55821281008,
"count": 250261,
"self": 53.3054872516077,
"children": {
"process_trajectory": {
"total": 866.2250446581747,
"count": 250261,
"self": 864.9035427579656,
"children": {
"RLTrainer._checkpoint": {
"total": 1.321501900209114,
"count": 8,
"self": 1.321501900209114
}
}
},
"_update_policy": {
"total": 4369.027680900297,
"count": 178,
"self": 481.9659361997619,
"children": {
"TorchPOCAOptimizer.update": {
"total": 3887.061744700535,
"count": 5355,
"self": 3887.061744700535
}
}
}
}
}
}
},
"trainer_threads": {
"total": 9.00006853044033e-07,
"count": 1,
"self": 9.00006853044033e-07
},
"TrainerController._save_models": {
"total": 0.16442179994191974,
"count": 1,
"self": 0.04636959987692535,
"children": {
"RLTrainer._checkpoint": {
"total": 0.1180522000649944,
"count": 1,
"self": 0.1180522000649944
}
}
}
}
}
}
}