{
"Huggy": {
"checkpoints": [
{
"steps": 199785,
"file_path": "results/Huggy2/Huggy/Huggy-199785.onnx",
"reward": 3.848875925189159,
"creation_time": 1736484541.0110238,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199785.pt"
]
},
{
"steps": 399902,
"file_path": "results/Huggy2/Huggy/Huggy-399902.onnx",
"reward": 3.702237656340003,
"creation_time": 1736484805.6658454,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399902.pt"
]
},
{
"steps": 599941,
"file_path": "results/Huggy2/Huggy/Huggy-599941.onnx",
"reward": 3.34106121821837,
"creation_time": 1736485081.2329156,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599941.pt"
]
},
{
"steps": 799957,
"file_path": "results/Huggy2/Huggy/Huggy-799957.onnx",
"reward": 3.7441003641342734,
"creation_time": 1736485349.5478065,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799957.pt"
]
},
{
"steps": 999931,
"file_path": "results/Huggy2/Huggy/Huggy-999931.onnx",
"reward": 3.448867432276408,
"creation_time": 1736485619.0375671,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999931.pt"
]
},
{
"steps": 1199879,
"file_path": "results/Huggy2/Huggy/Huggy-1199879.onnx",
"reward": 3.4891293692210366,
"creation_time": 1736485890.3502018,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199879.pt"
]
},
{
"steps": 1399959,
"file_path": "results/Huggy2/Huggy/Huggy-1399959.onnx",
"reward": 3.9662718176841736,
"creation_time": 1736486164.6258078,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399959.pt"
]
},
{
"steps": 1599925,
"file_path": "results/Huggy2/Huggy/Huggy-1599925.onnx",
"reward": 3.6904031236966452,
"creation_time": 1736486431.9796574,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599925.pt"
]
},
{
"steps": 1799869,
"file_path": "results/Huggy2/Huggy/Huggy-1799869.onnx",
"reward": 3.377926770846049,
"creation_time": 1736486705.096585,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799869.pt"
]
},
{
"steps": 1999978,
"file_path": "results/Huggy2/Huggy/Huggy-1999978.onnx",
"reward": 3.1676332583794227,
"creation_time": 1736486979.6517065,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999978.pt"
]
},
{
"steps": 2000039,
"file_path": "results/Huggy2/Huggy/Huggy-2000039.onnx",
"reward": 3.1167987755366733,
"creation_time": 1736486979.7736683,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000039.pt"
]
}
],
"final_checkpoint": {
"steps": 2000039,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.1167987755366733,
"creation_time": 1736486979.7736683,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000039.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.5.1+cu121"
}
}