{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199832,
        "file_path": "results/Huggy/Huggy/Huggy-199832.onnx",
        "reward": 3.275998375454887,
        "creation_time": 1733237350.7928278,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199832.pt"
        ]
      },
      {
        "steps": 399941,
        "file_path": "results/Huggy/Huggy/Huggy-399941.onnx",
        "reward": 3.544259657665175,
        "creation_time": 1733237609.7245173,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399941.pt"
        ]
      },
      {
        "steps": 599889,
        "file_path": "results/Huggy/Huggy/Huggy-599889.onnx",
        "reward": 3.692464381456375,
        "creation_time": 1733237866.6712291,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599889.pt"
        ]
      },
      {
        "steps": 799682,
        "file_path": "results/Huggy/Huggy/Huggy-799682.onnx",
        "reward": 3.8855678190355714,
        "creation_time": 1733238126.4774222,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799682.pt"
        ]
      },
      {
        "steps": 999940,
        "file_path": "results/Huggy/Huggy/Huggy-999940.onnx",
        "reward": 3.4399983302140846,
        "creation_time": 1733238385.5538495,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999940.pt"
        ]
      },
      {
        "steps": 1199962,
        "file_path": "results/Huggy/Huggy/Huggy-1199962.onnx",
        "reward": 3.4813055271326108,
        "creation_time": 1733238641.663411,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199962.pt"
        ]
      },
      {
        "steps": 1399985,
        "file_path": "results/Huggy/Huggy/Huggy-1399985.onnx",
        "reward": 3.5797248188654582,
        "creation_time": 1733238900.8948622,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399985.pt"
        ]
      },
      {
        "steps": 1599923,
        "file_path": "results/Huggy/Huggy/Huggy-1599923.onnx",
        "reward": 3.617549652663561,
        "creation_time": 1733239162.4407225,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599923.pt"
        ]
      },
      {
        "steps": 1799443,
        "file_path": "results/Huggy/Huggy/Huggy-1799443.onnx",
        "reward": 3.1815761047251083,
        "creation_time": 1733239430.1673768,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799443.pt"
        ]
      },
      {
        "steps": 1999853,
        "file_path": "results/Huggy/Huggy/Huggy-1999853.onnx",
        "reward": 3.4031471729278566,
        "creation_time": 1733239688.0329072,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999853.pt"
        ]
      },
      {
        "steps": 2000603,
        "file_path": "results/Huggy/Huggy/Huggy-2000603.onnx",
        "reward": 3.3692642996982975,
        "creation_time": 1733239688.2688606,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000603.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000603,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 3.3692642996982975,
      "creation_time": 1733239688.2688606,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000603.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.2.0.dev0",
    "torch_version": "2.5.1+cu121"
  }
}