ppo-Huggy-01/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199634,
                "file_path": "results/Huggy/Huggy/Huggy-199634.onnx",
                "reward": 3.30457671456141,
                "creation_time": 1671105788.9823053,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199634.pt"
                ]
            },
            {
                "steps": 399927,
                "file_path": "results/Huggy/Huggy/Huggy-399927.onnx",
                "reward": 3.9476437191752827,
                "creation_time": 1671106000.1559699,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399927.pt"
                ]
            },
            {
                "steps": 599986,
                "file_path": "results/Huggy/Huggy/Huggy-599986.onnx",
                "reward": 3.0834344148635866,
                "creation_time": 1671106214.6825008,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599986.pt"
                ]
            },
            {
                "steps": 799964,
                "file_path": "results/Huggy/Huggy/Huggy-799964.onnx",
                "reward": 3.9092666964051586,
                "creation_time": 1671106427.118452,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799964.pt"
                ]
            },
            {
                "steps": 999969,
                "file_path": "results/Huggy/Huggy/Huggy-999969.onnx",
                "reward": 3.948695606844766,
                "creation_time": 1671106641.5969937,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999969.pt"
                ]
            },
            {
                "steps": 1199982,
                "file_path": "results/Huggy/Huggy/Huggy-1199982.onnx",
                "reward": 3.7870144491132938,
                "creation_time": 1671106854.208265,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199982.pt"
                ]
            },
            {
                "steps": 1399467,
                "file_path": "results/Huggy/Huggy/Huggy-1399467.onnx",
                "reward": 3.4381251335144043,
                "creation_time": 1671107065.925518,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399467.pt"
                ]
            },
            {
                "steps": 1599965,
                "file_path": "results/Huggy/Huggy/Huggy-1599965.onnx",
                "reward": 3.9928891120970933,
                "creation_time": 1671107275.881521,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599965.pt"
                ]
            },
            {
                "steps": 1799967,
                "file_path": "results/Huggy/Huggy/Huggy-1799967.onnx",
                "reward": 3.600570172071457,
                "creation_time": 1671107486.5738215,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799967.pt"
                ]
            },
            {
                "steps": 1999919,
                "file_path": "results/Huggy/Huggy/Huggy-1999919.onnx",
                "reward": 3.837912772977075,
                "creation_time": 1671107698.6445215,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999919.pt"
                ]
            },
            {
                "steps": 2000022,
                "file_path": "results/Huggy/Huggy/Huggy-2000022.onnx",
                "reward": 3.921401720155369,
                "creation_time": 1671107698.7598915,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000022.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000022,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.921401720155369,
            "creation_time": 1671107698.7598915,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000022.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}
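
For reference, a minimal Python sketch that loads this file and summarizes the checkpoint history. The relative path below is an assumption based on the repository layout; the keys (including the library's own "auxillary_file_paths" spelling) are taken verbatim from the file above.

# A minimal sketch, assuming the file sits at run_logs/training_status.json
# relative to the working directory; adjust the path to your local copy.
import json

with open("run_logs/training_status.json") as f:
    status = json.load(f)

run = status["Huggy"]

# Print every saved checkpoint with its step count and recorded reward.
for ckpt in run["checkpoints"]:
    print(f"{ckpt['steps']:>8} steps  reward={ckpt['reward']:.3f}  {ckpt['file_path']}")

# Compare the final exported policy against the best-rewarded checkpoint.
final = run["final_checkpoint"]
best = max(run["checkpoints"], key=lambda c: c["reward"])
print("final:", final["file_path"], f"reward={final['reward']:.3f}")
print("best :", best["file_path"], f"reward={best['reward']:.3f}")

On this data the best-rewarded checkpoint (steps 1599965, reward 3.993) differs from the final export (steps 2000022, reward 3.921), which is the kind of gap this summary is meant to surface.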