ppo-Huggy/run_logs/training_status.json
{
"Huggy": {
"checkpoints": [
{
"steps": 199989,
"file_path": "results/Huggy/Huggy/Huggy-199989.onnx",
"reward": 3.423819661140442,
"creation_time": 1700834102.058882,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199989.pt"
]
},
{
"steps": 399974,
"file_path": "results/Huggy/Huggy/Huggy-399974.onnx",
"reward": 3.8597026245068697,
"creation_time": 1700834358.4512107,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399974.pt"
]
},
{
"steps": 599874,
"file_path": "results/Huggy/Huggy/Huggy-599874.onnx",
"reward": 3.767772244996038,
"creation_time": 1700834620.6466365,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599874.pt"
]
},
{
"steps": 799998,
"file_path": "results/Huggy/Huggy/Huggy-799998.onnx",
"reward": 3.9973543201677897,
"creation_time": 1700834878.9718254,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799998.pt"
]
},
{
"steps": 999995,
"file_path": "results/Huggy/Huggy/Huggy-999995.onnx",
"reward": 3.9223464613375456,
"creation_time": 1700835141.4017,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999995.pt"
]
},
{
"steps": 1199947,
"file_path": "results/Huggy/Huggy/Huggy-1199947.onnx",
"reward": 3.4110145895711836,
"creation_time": 1700835405.4207206,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199947.pt"
]
},
{
"steps": 1399908,
"file_path": "results/Huggy/Huggy/Huggy-1399908.onnx",
"reward": 3.661539063133575,
"creation_time": 1700835665.4950926,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399908.pt"
]
},
{
"steps": 1599955,
"file_path": "results/Huggy/Huggy/Huggy-1599955.onnx",
"reward": 3.6623590254097533,
"creation_time": 1700835929.3568954,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599955.pt"
]
},
{
"steps": 1799971,
"file_path": "results/Huggy/Huggy/Huggy-1799971.onnx",
"reward": 3.4288908598270824,
"creation_time": 1700836192.664871,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799971.pt"
]
},
{
"steps": 1999853,
"file_path": "results/Huggy/Huggy/Huggy-1999853.onnx",
"reward": 3.59198596230661,
"creation_time": 1700836451.024303,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999853.pt"
]
},
{
"steps": 2000603,
"file_path": "results/Huggy/Huggy/Huggy-2000603.onnx",
"reward": 3.5587394318388936,
"creation_time": 1700836451.2306473,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000603.pt"
]
}
],
"final_checkpoint": {
"steps": 2000603,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.5587394318388936,
"creation_time": 1700836451.2306473,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000603.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.0+cu118"
}
}
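
A minimal sketch of how the checkpoint records above could be consumed, assuming the file is saved locally at run_logs/training_status.json (the path and variable names below are illustrative, not part of the original file):

import json

# Load the ML-Agents training_status.json shown above and summarise the
# Huggy checkpoints. Adjust the path to wherever your run_logs directory is.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

huggy = status["Huggy"]
for ckpt in huggy["checkpoints"]:
    # Each checkpoint records the step count, mean reward, and the exported
    # .onnx policy path (the matching .pt file is under "auxillary_file_paths").
    print(f'{ckpt["steps"]:>9} steps  reward={ckpt["reward"]:.3f}  {ckpt["file_path"]}')

final = huggy["final_checkpoint"]
print(f'final: {final["steps"]} steps  reward={final["reward"]:.3f}  {final["file_path"]}')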