ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199991,
                "file_path": "results/Huggy2/Huggy/Huggy-199991.onnx",
                "reward": 3.5424056997665994,
                "creation_time": 1732586671.2866917,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-199991.pt"
                ]
            },
            {
                "steps": 399935,
                "file_path": "results/Huggy2/Huggy/Huggy-399935.onnx",
                "reward": 3.644991656424294,
                "creation_time": 1732586925.1010823,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-399935.pt"
                ]
            },
            {
                "steps": 599992,
                "file_path": "results/Huggy2/Huggy/Huggy-599992.onnx",
                "reward": 3.7793516417344413,
                "creation_time": 1732587180.792309,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-599992.pt"
                ]
            },
            {
                "steps": 799998,
                "file_path": "results/Huggy2/Huggy/Huggy-799998.onnx",
                "reward": 3.8660511728108786,
                "creation_time": 1732587439.3715827,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-799998.pt"
                ]
            },
            {
                "steps": 999961,
                "file_path": "results/Huggy2/Huggy/Huggy-999961.onnx",
                "reward": 3.436228351201862,
                "creation_time": 1732587713.7954574,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-999961.pt"
                ]
            },
            {
                "steps": 1199852,
                "file_path": "results/Huggy2/Huggy/Huggy-1199852.onnx",
                "reward": 3.6862462814464125,
                "creation_time": 1732587980.0558925,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1199852.pt"
                ]
            },
            {
                "steps": 1399989,
                "file_path": "results/Huggy2/Huggy/Huggy-1399989.onnx",
                "reward": 3.4015058522704265,
                "creation_time": 1732588230.2336485,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1399989.pt"
                ]
            },
            {
                "steps": 1599933,
                "file_path": "results/Huggy2/Huggy/Huggy-1599933.onnx",
                "reward": 3.5104211568832397,
                "creation_time": 1732588483.6931517,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1599933.pt"
                ]
            },
            {
                "steps": 1799898,
                "file_path": "results/Huggy2/Huggy/Huggy-1799898.onnx",
                "reward": 3.557202821969986,
                "creation_time": 1732588735.2239501,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1799898.pt"
                ]
            },
            {
                "steps": 1999974,
                "file_path": "results/Huggy2/Huggy/Huggy-1999974.onnx",
                "reward": null,
                "creation_time": 1732588987.5868158,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1999974.pt"
                ]
            },
            {
                "steps": 2000033,
                "file_path": "results/Huggy2/Huggy/Huggy-2000033.onnx",
                "reward": 2.3196654319763184,
                "creation_time": 1732588987.7069652,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-2000033.pt"
                ]
            },
            {
                "steps": 2000033,
                "file_path": "results/Huggy2/Huggy/Huggy-2000033.onnx",
                "reward": null,
                "creation_time": 1732591200.452949,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-2000033.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000033,
            "file_path": "results/Huggy2/Huggy.onnx",
            "reward": null,
            "creation_time": 1732591200.452949,
            "auxillary_file_paths": [
                "results/Huggy2/Huggy/Huggy-2000033.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.2.0.dev0",
        "torch_version": "2.5.1+cu121"
    }
}
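For reference, a minimal sketch of how this log might be inspected programmatically, assuming the file sits at run_logs/training_status.json as in the path above; the "Huggy" behavior-name key, the "checkpoints" list, and the "reward"/"steps"/"file_path" fields come from the file itself, while the script and its output formatting are purely illustrative.

```python
import json

# Load the ML-Agents training status log shown above
# (path assumed from the repo layout: run_logs/training_status.json).
with open("run_logs/training_status.json") as f:
    status = json.load(f)

checkpoints = status["Huggy"]["checkpoints"]

# Print every checkpoint with its step count and recorded reward.
# Note that "reward" can be null (None in Python), e.g. for the final export entries.
for ckpt in checkpoints:
    print(f"{ckpt['steps']:>8} steps  reward={ckpt['reward']}  -> {ckpt['file_path']}")

# Pick the checkpoint with the highest recorded reward, skipping null entries.
scored = [c for c in checkpoints if c["reward"] is not None]
best = max(scored, key=lambda c: c["reward"])
print(f"Best checkpoint: {best['file_path']} (reward {best['reward']:.3f})")
```

With the data above, this would report the 799998-step checkpoint (reward ≈ 3.866) as the highest-scoring one, while the final exported model at results/Huggy2/Huggy.onnx corresponds to the last 2000033-step entry.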