{
"name": "root",
"gauges": {
"Pyramids.Policy.Entropy.mean": {
"value": 0.4891842007637024,
"min": 0.4864496886730194,
"max": 1.40961492061615,
"count": 33
},
"Pyramids.Policy.Entropy.sum": {
"value": 14550.294921875,
"min": 14550.294921875,
"max": 42762.078125,
"count": 33
},
"Pyramids.Step.mean": {
"value": 989906.0,
"min": 29952.0,
"max": 989906.0,
"count": 33
},
"Pyramids.Step.sum": {
"value": 989906.0,
"min": 29952.0,
"max": 989906.0,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.mean": {
"value": 0.1417328417301178,
"min": -0.10461755096912384,
"max": 0.2163393199443817,
"count": 33
},
"Pyramids.Policy.ExtrinsicValueEstimate.sum": {
"value": 35.7166748046875,
"min": -25.21282958984375,
"max": 55.599205017089844,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.mean": {
"value": 0.01762525551021099,
"min": 0.0017850169679149985,
"max": 0.3551114797592163,
"count": 33
},
"Pyramids.Policy.RndValueEstimate.sum": {
"value": 4.441564559936523,
"min": 0.4551793336868286,
"max": 85.93698120117188,
"count": 33
},
"Pyramids.Losses.PolicyLoss.mean": {
"value": 0.07041627789551604,
"min": 0.06505331904937738,
"max": 0.07386146823510302,
"count": 33
},
"Pyramids.Losses.PolicyLoss.sum": {
"value": 0.9858278905372245,
"min": 0.5170302776457212,
"max": 1.0411579476397796,
"count": 33
},
"Pyramids.Losses.ValueLoss.mean": {
"value": 0.008685142988761208,
"min": 0.0005676662027157316,
"max": 0.011656984981079407,
"count": 33
},
"Pyramids.Losses.ValueLoss.sum": {
"value": 0.12159200184265692,
"min": 0.006617539695736713,
"max": 0.1631977897351117,
"count": 33
},
"Pyramids.Policy.LearningRate.mean": {
"value": 7.269204719821425e-06,
"min": 7.269204719821425e-06,
"max": 0.00029515063018788575,
"count": 33
},
"Pyramids.Policy.LearningRate.sum": {
"value": 0.00010176886607749995,
"min": 0.00010176886607749995,
"max": 0.0033824849725050997,
"count": 33
},
"Pyramids.Policy.Epsilon.mean": {
"value": 0.10242303571428572,
"min": 0.10242303571428572,
"max": 0.19838354285714285,
"count": 33
},
"Pyramids.Policy.Epsilon.sum": {
"value": 1.4339225000000002,
"min": 1.3886848,
"max": 2.5274948999999998,
"count": 33
},
"Pyramids.Policy.Beta.mean": {
"value": 0.00025206126785714275,
"min": 0.00025206126785714275,
"max": 0.00983851593142857,
"count": 33
},
"Pyramids.Policy.Beta.sum": {
"value": 0.0035288577499999986,
"min": 0.0035288577499999986,
"max": 0.11277674051000003,
"count": 33
},
"Pyramids.Losses.RNDLoss.mean": {
"value": 0.012688325718045235,
"min": 0.012688325718045235,
"max": 0.5393239855766296,
"count": 33
},
"Pyramids.Losses.RNDLoss.sum": {
"value": 0.17763656377792358,
"min": 0.17763656377792358,
"max": 3.775268077850342,
"count": 33
},
"Pyramids.Environment.EpisodeLength.mean": {
"value": 723.6590909090909,
"min": 552.4528301886793,
"max": 999.0,
"count": 33
},
"Pyramids.Environment.EpisodeLength.sum": {
"value": 31841.0,
"min": 15984.0,
"max": 32933.0,
"count": 33
},
"Pyramids.Environment.CumulativeReward.mean": {
"value": 0.5488363280892372,
"min": -1.0000000521540642,
"max": 0.9945773306601452,
"count": 33
},
"Pyramids.Environment.CumulativeReward.sum": {
"value": 24.148798435926437,
"min": -29.977801725268364,
"max": 52.7125985249877,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.mean": {
"value": 0.5488363280892372,
"min": -1.0000000521540642,
"max": 0.9945773306601452,
"count": 33
},
"Pyramids.Policy.ExtrinsicReward.sum": {
"value": 24.148798435926437,
"min": -29.977801725268364,
"max": 52.7125985249877,
"count": 33
},
"Pyramids.Policy.RndReward.mean": {
"value": 0.09604682175267953,
"min": 0.07568903431243632,
"max": 11.286061864346266,
"count": 33
},
"Pyramids.Policy.RndReward.sum": {
"value": 4.2260601571178995,
"min": 3.872648993972689,
"max": 180.57698982954025,
"count": 33
},
"Pyramids.IsTraining.mean": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
},
"Pyramids.IsTraining.sum": {
"value": 1.0,
"min": 1.0,
"max": 1.0,
"count": 33
}
},
"metadata": {
"timer_format_version": "0.1.0",
"start_time_seconds": "1679052446",
"python_version": "3.9.16 (main, Dec 7 2022, 01:11:51) \n[GCC 9.4.0]",
"command_line_arguments": "/usr/local/bin/mlagents-learn ./config/ppo/PyramidsRND.yaml --env=./training-envs-executables/linux/Pyramids/Pyramids --run-id=Pyramids Training --no-graphics",
"mlagents_version": "0.31.0.dev0",
"mlagents_envs_version": "0.31.0.dev0",
"communication_protocol_version": "1.5.0",
"pytorch_version": "1.11.0+cu102",
"numpy_version": "1.21.2",
"end_time_seconds": "1679054451"
},
"total": 2005.49943537,
"count": 1,
"self": 0.42441031999942425,
"children": {
"run_training.setup": {
"total": 0.10129927800016958,
"count": 1,
"self": 0.10129927800016958
},
"TrainerController.start_learning": {
"total": 2004.9737257720003,
"count": 1,
"self": 1.3442122149617717,
"children": {
"TrainerController._reset_env": {
"total": 7.122609523999927,
"count": 1,
"self": 7.122609523999927
},
"TrainerController.advance": {
"total": 1996.4164098810384,
"count": 63302,
"self": 1.4215391701077351,
"children": {
"env_step": {
"total": 1378.3127125399596,
"count": 63302,
"self": 1271.5395344079243,
"children": {
"SubprocessEnvManager._take_step": {
"total": 105.97440320907754,
"count": 63302,
"self": 4.581719118059027,
"children": {
"TorchPolicy.evaluate": {
"total": 101.39268409101851,
"count": 62556,
"self": 101.39268409101851
}
}
},
"workers": {
"total": 0.7987749229578185,
"count": 63302,
"self": 0.0,
"children": {
"worker_root": {
"total": 2000.7196232280012,
"count": 63302,
"is_parallel": true,
"self": 839.9582141129495,
"children": {
"run_training.setup": {
"total": 0.0,
"count": 0,
"is_parallel": true,
"self": 0.0,
"children": {
"steps_from_proto": {
"total": 0.0016882180002539826,
"count": 1,
"is_parallel": true,
"self": 0.0005584390005424211,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0011297789997115615,
"count": 8,
"is_parallel": true,
"self": 0.0011297789997115615
}
}
},
"UnityEnvironment.step": {
"total": 0.047266652999951475,
"count": 1,
"is_parallel": true,
"self": 0.0005196729998715455,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 0.00044502900027509895,
"count": 1,
"is_parallel": true,
"self": 0.00044502900027509895
},
"communicator.exchange": {
"total": 0.04464610900004118,
"count": 1,
"is_parallel": true,
"self": 0.04464610900004118
},
"steps_from_proto": {
"total": 0.001655841999763652,
"count": 1,
"is_parallel": true,
"self": 0.00034281399894098286,
"children": {
"_process_rank_one_or_two_observation": {
"total": 0.0013130280008226691,
"count": 8,
"is_parallel": true,
"self": 0.0013130280008226691
}
}
}
}
}
}
},
"UnityEnvironment.step": {
"total": 1160.7614091150517,
"count": 63301,
"is_parallel": true,
"self": 30.328445358142744,
"children": {
"UnityEnvironment._generate_step_input": {
"total": 22.48475628995766,
"count": 63301,
"is_parallel": true,
"self": 22.48475628995766
},
"communicator.exchange": {
"total": 1019.2240604020349,
"count": 63301,
"is_parallel": true,
"self": 1019.2240604020349
},
"steps_from_proto": {
"total": 88.72414706491645,
"count": 63301,
"is_parallel": true,
"self": 18.813858682070986,
"children": {
"_process_rank_one_or_two_observation": {
"total": 69.91028838284547,
"count": 506408,
"is_parallel": true,
"self": 69.91028838284547
}
}
}
}
}
}
}
}
}
}
},
"trainer_advance": {
"total": 616.682158170971,
"count": 63302,
"self": 2.392213496074419,
"children": {
"process_trajectory": {
"total": 112.86689937990195,
"count": 63302,
"self": 112.67104041690209,
"children": {
"RLTrainer._checkpoint": {
"total": 0.19585896299986416,
"count": 2,
"self": 0.19585896299986416
}
}
},
"_update_policy": {
"total": 501.4230452949946,
"count": 444,
"self": 317.78075527905276,
"children": {
"TorchPPOOptimizer.update": {
"total": 183.64229001594185,
"count": 22785,
"self": 183.64229001594185
}
}
}
}
}
}
},
"trainer_threads": {
"total": 1.0530002327868715e-06,
"count": 1,
"self": 1.0530002327868715e-06
},
"TrainerController._save_models": {
"total": 0.0904930990000139,
"count": 1,
"self": 0.0018201189996034373,
"children": {
"RLTrainer._checkpoint": {
"total": 0.08867298000041046,
"count": 1,
"self": 0.08867298000041046
}
}
}
}
}
}
}