ppo-Huggy / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199993,
                "file_path": "results/Huggy/Huggy/Huggy-199993.onnx",
                "reward": 3.561817161853497,
                "creation_time": 1671435597.9517357,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199993.pt"
                ]
            },
            {
                "steps": 399968,
                "file_path": "results/Huggy/Huggy/Huggy-399968.onnx",
                "reward": 3.7929170685754694,
                "creation_time": 1671435808.3376894,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399968.pt"
                ]
            },
            {
                "steps": 599946,
                "file_path": "results/Huggy/Huggy/Huggy-599946.onnx",
                "reward": 3.9347189524594475,
                "creation_time": 1671436043.190582,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599946.pt"
                ]
            },
            {
                "steps": 799983,
                "file_path": "results/Huggy/Huggy/Huggy-799983.onnx",
                "reward": 3.716039171376204,
                "creation_time": 1671436281.8029773,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799983.pt"
                ]
            },
            {
                "steps": 999970,
                "file_path": "results/Huggy/Huggy/Huggy-999970.onnx",
                "reward": 3.6303674099328633,
                "creation_time": 1671436522.6943557,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999970.pt"
                ]
            },
            {
                "steps": 1199903,
                "file_path": "results/Huggy/Huggy/Huggy-1199903.onnx",
                "reward": 4.0400610916754776,
                "creation_time": 1671436763.2577884,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199903.pt"
                ]
            },
            {
                "steps": 1399926,
                "file_path": "results/Huggy/Huggy/Huggy-1399926.onnx",
                "reward": 3.8639533892273903,
                "creation_time": 1671436974.9072838,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399926.pt"
                ]
            },
            {
                "steps": 1599992,
                "file_path": "results/Huggy/Huggy/Huggy-1599992.onnx",
                "reward": 3.924788598125503,
                "creation_time": 1671437190.8897948,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599992.pt"
                ]
            },
            {
                "steps": 1799996,
                "file_path": "results/Huggy/Huggy/Huggy-1799996.onnx",
                "reward": 3.6985489835684326,
                "creation_time": 1671437405.7638175,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799996.pt"
                ]
            },
            {
                "steps": 1999987,
                "file_path": "results/Huggy/Huggy/Huggy-1999987.onnx",
                "reward": 3.634934371915357,
                "creation_time": 1671437626.1649287,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999987.pt"
                ]
            },
            {
                "steps": 2000099,
                "file_path": "results/Huggy/Huggy/Huggy-2000099.onnx",
                "reward": 3.7016681313514708,
                "creation_time": 1671437626.2843227,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000099.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000099,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.7016681313514708,
            "creation_time": 1671437626.2843227,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000099.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}
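
The file above is the training_status.json that ML-Agents writes during a run: one entry per behavior ("Huggy") holding a "checkpoints" list plus a "final_checkpoint", where each record carries steps, file_path, reward, creation_time, and "auxillary_file_paths" (spelled that way by ML-Agents itself). A minimal sketch of reading it back, assuming the file sits at run_logs/training_status.json relative to the working directory:

import json

# Load the ML-Agents training status file (the relative path below is an
# assumption; point it at your local copy of run_logs/training_status.json).
with open("run_logs/training_status.json") as f:
    status = json.load(f)

# Walk the checkpoint history for the "Huggy" behavior and print each
# checkpoint's step count, mean reward, and exported ONNX path.
for ckpt in status["Huggy"]["checkpoints"]:
    print(f"{ckpt['steps']:>8} steps  reward {ckpt['reward']:.3f}  {ckpt['file_path']}")

# The final checkpoint mirrors the last entry but points at the top-level
# results/Huggy/Huggy.onnx export.
final = status["Huggy"]["final_checkpoint"]
print(f"final: {final['steps']} steps, reward {final['reward']:.3f}")

Run against this file, the loop would show the mean reward climbing from roughly 3.56 at ~200k steps to a peak of about 4.04 at ~1.2M steps, settling near 3.70 at the 2,000,099-step final checkpoint.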