{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 499994,
                "file_path": "results/Huggy/Huggy/Huggy-499994.onnx",
                "reward": 3.996315734330998,
                "creation_time": 1670889088.6237512,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-499994.pt"
                ]
            },
            {
                "steps": 999962,
                "file_path": "results/Huggy/Huggy/Huggy-999962.onnx",
                "reward": 3.746860443662714,
                "creation_time": 1670889629.260034,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999962.pt"
                ]
            },
            {
                "steps": 1499643,
                "file_path": "results/Huggy/Huggy/Huggy-1499643.onnx",
                "reward": 3.8433495142738225,
                "creation_time": 1670890168.2184858,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1499643.pt"
                ]
            },
            {
                "steps": 1999964,
                "file_path": "results/Huggy/Huggy/Huggy-1999964.onnx",
                "reward": 3.8665436784426372,
                "creation_time": 1670890711.3599691,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999964.pt"
                ]
            },
            {
                "steps": 2000023,
                "file_path": "results/Huggy/Huggy/Huggy-2000023.onnx",
                "reward": 3.865325839288773,
                "creation_time": 1670890711.483272,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000023.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000023,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.865325839288773,
            "creation_time": 1670890711.483272,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000023.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}