{
"Huggy": {
"checkpoints": [
{
"steps": 199817,
"file_path": "results/Huggy/Huggy/Huggy-199817.onnx",
"reward": 3.422197105228037,
"creation_time": 1693308131.8196816,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199817.pt"
]
},
{
"steps": 399946,
"file_path": "results/Huggy/Huggy/Huggy-399946.onnx",
"reward": 3.883541957537333,
"creation_time": 1693308371.0681202,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399946.pt"
]
},
{
"steps": 599919,
"file_path": "results/Huggy/Huggy/Huggy-599919.onnx",
"reward": 4.105145671150901,
"creation_time": 1693308618.3198893,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599919.pt"
]
},
{
"steps": 799825,
"file_path": "results/Huggy/Huggy/Huggy-799825.onnx",
"reward": 3.7332231021491733,
"creation_time": 1693308864.9922276,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799825.pt"
]
},
{
"steps": 999745,
"file_path": "results/Huggy/Huggy/Huggy-999745.onnx",
"reward": 3.6362410920565247,
"creation_time": 1693309111.6547704,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999745.pt"
]
},
{
"steps": 1199925,
"file_path": "results/Huggy/Huggy/Huggy-1199925.onnx",
"reward": 3.9860761915483782,
"creation_time": 1693309359.1881223,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199925.pt"
]
},
{
"steps": 1399935,
"file_path": "results/Huggy/Huggy/Huggy-1399935.onnx",
"reward": 3.841086232854474,
"creation_time": 1693309600.2636404,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399935.pt"
]
},
{
"steps": 1599887,
"file_path": "results/Huggy/Huggy/Huggy-1599887.onnx",
"reward": 3.8798574910658163,
"creation_time": 1693309847.2207704,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599887.pt"
]
},
{
"steps": 1799700,
"file_path": "results/Huggy/Huggy/Huggy-1799700.onnx",
"reward": 3.7268088177910874,
"creation_time": 1693310088.4449835,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799700.pt"
]
},
{
"steps": 1999960,
"file_path": "results/Huggy/Huggy/Huggy-1999960.onnx",
"reward": 3.6852245051810084,
"creation_time": 1693310329.9226139,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999960.pt"
]
},
{
"steps": 2000011,
"file_path": "results/Huggy/Huggy/Huggy-2000011.onnx",
"reward": 3.6445352161924043,
"creation_time": 1693310330.0465462,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000011.pt"
]
}
],
"final_checkpoint": {
"steps": 2000011,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.6445352161924043,
"creation_time": 1693310330.0465462,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000011.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}