{
"Huggy": {
"checkpoints": [
{
"steps": 199844,
"file_path": "results/Huggy2/Huggy/Huggy-199844.onnx",
"reward": 3.249212582078245,
"creation_time": 1735780315.8404794,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199844.pt"
]
},
{
"steps": 399899,
"file_path": "results/Huggy2/Huggy/Huggy-399899.onnx",
"reward": 3.5898442772718577,
"creation_time": 1735780553.2817214,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399899.pt"
]
},
{
"steps": 599997,
"file_path": "results/Huggy2/Huggy/Huggy-599997.onnx",
"reward": 3.5309062279187717,
"creation_time": 1735780792.3586526,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599997.pt"
]
},
{
"steps": 799954,
"file_path": "results/Huggy2/Huggy/Huggy-799954.onnx",
"reward": 3.6048280490411293,
"creation_time": 1735781032.2514002,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799954.pt"
]
},
{
"steps": 999993,
"file_path": "results/Huggy2/Huggy/Huggy-999993.onnx",
"reward": 3.517209830609235,
"creation_time": 1735781270.9435139,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999993.pt"
]
},
{
"steps": 1199944,
"file_path": "results/Huggy2/Huggy/Huggy-1199944.onnx",
"reward": 3.76247820854187,
"creation_time": 1735781510.9492645,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199944.pt"
]
},
{
"steps": 1399994,
"file_path": "results/Huggy2/Huggy/Huggy-1399994.onnx",
"reward": 3.728397221114505,
"creation_time": 1735781745.0593004,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399994.pt"
]
},
{
"steps": 1599716,
"file_path": "results/Huggy2/Huggy/Huggy-1599716.onnx",
"reward": 3.8280984284356236,
"creation_time": 1735781986.3418064,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599716.pt"
]
},
{
"steps": 1799954,
"file_path": "results/Huggy2/Huggy/Huggy-1799954.onnx",
"reward": 3.581490708179161,
"creation_time": 1735782225.7772396,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799954.pt"
]
},
{
"steps": 1999395,
"file_path": "results/Huggy2/Huggy/Huggy-1999395.onnx",
"reward": 4.431330864246075,
"creation_time": 1735782467.661196,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999395.pt"
]
},
{
"steps": 2000145,
"file_path": "results/Huggy2/Huggy/Huggy-2000145.onnx",
"reward": 3.8669976677213396,
"creation_time": 1735782467.8706467,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000145.pt"
]
}
],
"final_checkpoint": {
"steps": 2000145,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.8669976677213396,
"creation_time": 1735782467.8706467,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000145.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.5.1+cu121"
}
}