ppo-Huggy / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 499930,
                "file_path": "results/Huggy/Huggy/Huggy-499930.onnx",
                "reward": 3.8831948014192803,
                "creation_time": 1671074127.2544565,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-499930.pt"
                ]
            },
            {
                "steps": 999927,
                "file_path": "results/Huggy/Huggy/Huggy-999927.onnx",
                "reward": 3.824881578313893,
                "creation_time": 1671074708.879421,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999927.pt"
                ]
            },
            {
                "steps": 1499949,
                "file_path": "results/Huggy/Huggy/Huggy-1499949.onnx",
                "reward": 4.029020522673106,
                "creation_time": 1671075292.685461,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1499949.pt"
                ]
            },
            {
                "steps": 1999958,
                "file_path": "results/Huggy/Huggy/Huggy-1999958.onnx",
                "reward": 3.57866946993203,
                "creation_time": 1671075878.2190216,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999958.pt"
                ]
            },
            {
                "steps": 2000046,
                "file_path": "results/Huggy/Huggy/Huggy-2000046.onnx",
                "reward": 3.6096751570701597,
                "creation_time": 1671075878.3433635,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000046.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000046,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.6096751570701597,
            "creation_time": 1671075878.3433635,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000046.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}
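
A minimal usage sketch (not part of the original JSON) showing how the checkpoint records above could be read with Python's standard json module; the relative path to this file is an assumption based on this repo's layout.

import json

# Assumed location of this file relative to the repo root.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

# List every intermediate checkpoint with its step count and mean reward.
for ckpt in status["Huggy"]["checkpoints"]:
    print(f'{ckpt["steps"]:>8} steps  reward={ckpt["reward"]:.3f}  {ckpt["file_path"]}')

# The exported final model (results/Huggy/Huggy.onnx) and its reward.
final = status["Huggy"]["final_checkpoint"]
print("final:", final["file_path"], "reward:", round(final["reward"], 3))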