{
"Huggy": {
"checkpoints": [
{
"steps": 199800,
"file_path": "results/Huggy2/Huggy/Huggy-199800.onnx",
"reward": 3.476739459868633,
"creation_time": 1731506438.2501104,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199800.pt"
]
},
{
"steps": 399829,
"file_path": "results/Huggy2/Huggy/Huggy-399829.onnx",
"reward": 3.8689344045188694,
"creation_time": 1731506674.3844974,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399829.pt"
]
},
{
"steps": 599913,
"file_path": "results/Huggy2/Huggy/Huggy-599913.onnx",
"reward": 4.394926460165727,
"creation_time": 1731506922.543217,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599913.pt"
]
},
{
"steps": 799945,
"file_path": "results/Huggy2/Huggy/Huggy-799945.onnx",
"reward": 3.6985003318105427,
"creation_time": 1731507164.443475,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799945.pt"
]
},
{
"steps": 999929,
"file_path": "results/Huggy2/Huggy/Huggy-999929.onnx",
"reward": 3.6818090358118374,
"creation_time": 1731507407.9873085,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999929.pt"
]
},
{
"steps": 1199879,
"file_path": "results/Huggy2/Huggy/Huggy-1199879.onnx",
"reward": 3.6597012227231804,
"creation_time": 1731507656.0669985,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199879.pt"
]
},
{
"steps": 1399967,
"file_path": "results/Huggy2/Huggy/Huggy-1399967.onnx",
"reward": 3.6670790938969473,
"creation_time": 1731507902.315439,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399967.pt"
]
},
{
"steps": 1599990,
"file_path": "results/Huggy2/Huggy/Huggy-1599990.onnx",
"reward": 3.7897852928671116,
"creation_time": 1731508146.22967,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599990.pt"
]
},
{
"steps": 1799939,
"file_path": "results/Huggy2/Huggy/Huggy-1799939.onnx",
"reward": 3.6834807186336307,
"creation_time": 1731508388.1235187,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799939.pt"
]
},
{
"steps": 1999499,
"file_path": "results/Huggy2/Huggy/Huggy-1999499.onnx",
"reward": 4.2985630537334245,
"creation_time": 1731508631.0715158,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999499.pt"
]
},
{
"steps": 2000249,
"file_path": "results/Huggy2/Huggy/Huggy-2000249.onnx",
"reward": 3.8976362109184266,
"creation_time": 1731508631.2204094,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000249.pt"
]
}
],
"final_checkpoint": {
"steps": 2000249,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.8976362109184266,
"creation_time": 1731508631.2204094,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000249.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.5.0+cu121"
}
}