{
"Huggy": {
"checkpoints": [
{
"steps": 199931,
"file_path": "results/Huggy2/Huggy/Huggy-199931.onnx",
"reward": 3.6312661279331553,
"creation_time": 1732682340.4366264,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199931.pt"
]
},
{
"steps": 399919,
"file_path": "results/Huggy2/Huggy/Huggy-399919.onnx",
"reward": 3.841608488246014,
"creation_time": 1732682576.7477431,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399919.pt"
]
},
{
"steps": 599883,
"file_path": "results/Huggy2/Huggy/Huggy-599883.onnx",
"reward": 3.5535850133746862,
"creation_time": 1732682817.0338945,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599883.pt"
]
},
{
"steps": 799932,
"file_path": "results/Huggy2/Huggy/Huggy-799932.onnx",
"reward": 3.857500521909623,
"creation_time": 1732683053.177877,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799932.pt"
]
},
{
"steps": 999971,
"file_path": "results/Huggy2/Huggy/Huggy-999971.onnx",
"reward": 4.127576473330663,
"creation_time": 1732683300.7681863,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999971.pt"
]
},
{
"steps": 1199900,
"file_path": "results/Huggy2/Huggy/Huggy-1199900.onnx",
"reward": 3.6227308439485952,
"creation_time": 1732683545.8542197,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199900.pt"
]
},
{
"steps": 1399992,
"file_path": "results/Huggy2/Huggy/Huggy-1399992.onnx",
"reward": 3.776273686961351,
"creation_time": 1732683785.6635354,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399992.pt"
]
},
{
"steps": 1599987,
"file_path": "results/Huggy2/Huggy/Huggy-1599987.onnx",
"reward": 3.619909492822794,
"creation_time": 1732684029.7762709,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599987.pt"
]
},
{
"steps": 1799529,
"file_path": "results/Huggy2/Huggy/Huggy-1799529.onnx",
"reward": 3.732726488510768,
"creation_time": 1732684274.079444,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799529.pt"
]
},
{
"steps": 1999951,
"file_path": "results/Huggy2/Huggy/Huggy-1999951.onnx",
"reward": 3.65497899800539,
"creation_time": 1732684516.2528837,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999951.pt"
]
},
{
"steps": 2000020,
"file_path": "results/Huggy2/Huggy/Huggy-2000020.onnx",
"reward": 3.7469241548986996,
"creation_time": 1732684516.370863,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000020.pt"
]
}
],
"final_checkpoint": {
"steps": 2000020,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.7469241548986996,
"creation_time": 1732684516.370863,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000020.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.5.1+cu121"
}
}