{
"Huggy": {
"checkpoints": [
{
"steps": 199840,
"file_path": "results/Huggy/Huggy/Huggy-199840.onnx",
"reward": 3.3089215933386957,
"creation_time": 1683791064.4436345,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199840.pt"
]
},
{
"steps": 399923,
"file_path": "results/Huggy/Huggy/Huggy-399923.onnx",
"reward": 4.107977960207691,
"creation_time": 1683791299.9536886,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399923.pt"
]
},
{
"steps": 599948,
"file_path": "results/Huggy/Huggy/Huggy-599948.onnx",
"reward": 3.4989074230194093,
"creation_time": 1683791541.4318638,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599948.pt"
]
},
{
"steps": 799949,
"file_path": "results/Huggy/Huggy/Huggy-799949.onnx",
"reward": 4.040041236060389,
"creation_time": 1683791780.1831026,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799949.pt"
]
},
{
"steps": 999830,
"file_path": "results/Huggy/Huggy/Huggy-999830.onnx",
"reward": 3.8198567675793265,
"creation_time": 1683792023.0647454,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999830.pt"
]
},
{
"steps": 1199792,
"file_path": "results/Huggy/Huggy/Huggy-1199792.onnx",
"reward": 3.966910434323688,
"creation_time": 1683792266.5828805,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199792.pt"
]
},
{
"steps": 1399952,
"file_path": "results/Huggy/Huggy/Huggy-1399952.onnx",
"reward": 4.97040205001831,
"creation_time": 1683792509.4413815,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399952.pt"
]
},
{
"steps": 1599995,
"file_path": "results/Huggy/Huggy/Huggy-1599995.onnx",
"reward": 3.8350014835596085,
"creation_time": 1683792747.0330806,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599995.pt"
]
},
{
"steps": 1799800,
"file_path": "results/Huggy/Huggy/Huggy-1799800.onnx",
"reward": 4.0212877256828445,
"creation_time": 1683792990.0597594,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799800.pt"
]
},
{
"steps": 1999459,
"file_path": "results/Huggy/Huggy/Huggy-1999459.onnx",
"reward": 4.065268781839633,
"creation_time": 1683793230.117619,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999459.pt"
]
},
{
"steps": 2000209,
"file_path": "results/Huggy/Huggy/Huggy-2000209.onnx",
"reward": 3.9251918460314092,
"creation_time": 1683793230.268093,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000209.pt"
]
}
],
"final_checkpoint": {
"steps": 2000209,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.9251918460314092,
"creation_time": 1683793230.268093,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000209.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}