{
"Huggy": {
"checkpoints": [
{
"steps": 199947,
"file_path": "results/Huggy/Huggy/Huggy-199947.onnx",
"reward": 3.4308138691462005,
"creation_time": 1678939689.5249274,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199947.pt"
]
},
{
"steps": 399740,
"file_path": "results/Huggy/Huggy/Huggy-399740.onnx",
"reward": 3.903888925358101,
"creation_time": 1678940163.795726,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399740.pt"
]
},
{
"steps": 599866,
"file_path": "results/Huggy/Huggy/Huggy-599866.onnx",
"reward": 3.8601903279622394,
"creation_time": 1678940640.4610248,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599866.pt"
]
},
{
"steps": 799989,
"file_path": "results/Huggy/Huggy/Huggy-799989.onnx",
"reward": 3.8217755518853664,
"creation_time": 1678941102.3212814,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799989.pt"
]
},
{
"steps": 999935,
"file_path": "results/Huggy/Huggy/Huggy-999935.onnx",
"reward": 3.6104141957689038,
"creation_time": 1678941582.329472,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999935.pt"
]
},
{
"steps": 1199949,
"file_path": "results/Huggy/Huggy/Huggy-1199949.onnx",
"reward": 4.030185840092599,
"creation_time": 1678942061.4512267,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199949.pt"
]
},
{
"steps": 1399934,
"file_path": "results/Huggy/Huggy/Huggy-1399934.onnx",
"reward": 3.656774039801002,
"creation_time": 1678942521.2607908,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399934.pt"
]
},
{
"steps": 1599305,
"file_path": "results/Huggy/Huggy/Huggy-1599305.onnx",
"reward": 3.416003139775533,
"creation_time": 1678942996.4888499,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599305.pt"
]
},
{
"steps": 1799968,
"file_path": "results/Huggy/Huggy/Huggy-1799968.onnx",
"reward": 3.5429136695400363,
"creation_time": 1678943472.6724634,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799968.pt"
]
},
{
"steps": 1999722,
"file_path": "results/Huggy/Huggy/Huggy-1999722.onnx",
"reward": 4.47578855071749,
"creation_time": 1678943948.8857749,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999722.pt"
]
},
{
"steps": 2000472,
"file_path": "results/Huggy/Huggy/Huggy-2000472.onnx",
"reward": 3.56253894418478,
"creation_time": 1678943949.4057248,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000472.pt"
]
}
],
"final_checkpoint": {
"steps": 2000472,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.56253894418478,
"creation_time": 1678943949.4057248,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000472.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.30.0",
"torch_version": "1.8.1+cu102"
}
}