ppo-Huggy / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199805,
                "file_path": "results/Huggy/Huggy/Huggy-199805.onnx",
                "reward": 3.1613538320247945,
                "creation_time": 1684593902.5691836,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199805.pt"
                ]
            },
            {
                "steps": 399835,
                "file_path": "results/Huggy/Huggy/Huggy-399835.onnx",
                "reward": 4.014951695998509,
                "creation_time": 1684594134.6679616,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399835.pt"
                ]
            },
            {
                "steps": 599953,
                "file_path": "results/Huggy/Huggy/Huggy-599953.onnx",
                "reward": 3.7234372049570084,
                "creation_time": 1684594373.0502481,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599953.pt"
                ]
            },
            {
                "steps": 799994,
                "file_path": "results/Huggy/Huggy/Huggy-799994.onnx",
                "reward": 3.9446364048608515,
                "creation_time": 1684594615.2357411,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799994.pt"
                ]
            },
            {
                "steps": 999996,
                "file_path": "results/Huggy/Huggy/Huggy-999996.onnx",
                "reward": 3.5683980039320886,
                "creation_time": 1684594870.0700443,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999996.pt"
                ]
            },
            {
                "steps": 1199946,
                "file_path": "results/Huggy/Huggy/Huggy-1199946.onnx",
                "reward": 3.7918554674834013,
                "creation_time": 1684595142.6709292,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199946.pt"
                ]
            },
            {
                "steps": 1399917,
                "file_path": "results/Huggy/Huggy/Huggy-1399917.onnx",
                "reward": 4.040991147926875,
                "creation_time": 1684595402.2868445,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399917.pt"
                ]
            },
            {
                "steps": 1599915,
                "file_path": "results/Huggy/Huggy/Huggy-1599915.onnx",
                "reward": 3.8812196795102003,
                "creation_time": 1684595646.9275913,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599915.pt"
                ]
            },
            {
                "steps": 1799968,
                "file_path": "results/Huggy/Huggy/Huggy-1799968.onnx",
                "reward": 3.737945978794623,
                "creation_time": 1684595889.3458273,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799968.pt"
                ]
            },
            {
                "steps": 1999991,
                "file_path": "results/Huggy/Huggy/Huggy-1999991.onnx",
                "reward": 3.8136403870124083,
                "creation_time": 1684596136.6831894,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999991.pt"
                ]
            },
            {
                "steps": 2000091,
                "file_path": "results/Huggy/Huggy/Huggy-2000091.onnx",
                "reward": 3.841152941280941,
                "creation_time": 1684596136.811074,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000091.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000091,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.841152941280941,
            "creation_time": 1684596136.811074,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000091.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
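
For reference, the structure above (a top-level behavior key mapping to a "checkpoints" list and a "final_checkpoint" entry, as written by ML-Agents) can be summarized with a few lines of Python. This is a minimal sketch, not part of the training run: the relative path passed to open() is an assumption about where the file sits, while the behavior key "Huggy" and the field names are taken from the file itself. Note that "auxillary_file_paths" is the key exactly as ML-Agents spells it, so it is preserved verbatim above.

import json

# Load the training status file (path is an assumption based on the
# repository layout shown above).
with open("run_logs/training_status.json") as f:
    status = json.load(f)

behavior = status["Huggy"]  # behavior name as it appears in this file

# Print step count and mean reward for each saved checkpoint.
for ckpt in behavior["checkpoints"]:
    print(f"steps={ckpt['steps']:>8}  reward={ckpt['reward']:.3f}  "
          f"file={ckpt['file_path']}")

final = behavior["final_checkpoint"]
print(f"final: steps={final['steps']}  reward={final['reward']:.3f}  "
      f"file={final['file_path']}")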