{
"Huggy": {
"checkpoints": [
{
"steps": 199933,
"file_path": "results/Huggy/Huggy/Huggy-199933.onnx",
"reward": 3.42556362712022,
"creation_time": 1685183831.1310914,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199933.pt"
]
},
{
"steps": 399866,
"file_path": "results/Huggy/Huggy/Huggy-399866.onnx",
"reward": 3.8199829399585723,
"creation_time": 1685184067.5280106,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399866.pt"
]
},
{
"steps": 599977,
"file_path": "results/Huggy/Huggy/Huggy-599977.onnx",
"reward": 3.368118792772293,
"creation_time": 1685184308.9508471,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599977.pt"
]
},
{
"steps": 799977,
"file_path": "results/Huggy/Huggy/Huggy-799977.onnx",
"reward": 3.7728174948829345,
"creation_time": 1685184547.0668151,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799977.pt"
]
},
{
"steps": 999934,
"file_path": "results/Huggy/Huggy/Huggy-999934.onnx",
"reward": 3.922189011130222,
"creation_time": 1685184786.066901,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999934.pt"
]
},
{
"steps": 1199960,
"file_path": "results/Huggy/Huggy/Huggy-1199960.onnx",
"reward": 3.8488970904639275,
"creation_time": 1685185023.6779177,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199960.pt"
]
},
{
"steps": 1399945,
"file_path": "results/Huggy/Huggy/Huggy-1399945.onnx",
"reward": 3.901291877624998,
"creation_time": 1685185260.331527,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399945.pt"
]
},
{
"steps": 1599960,
"file_path": "results/Huggy/Huggy/Huggy-1599960.onnx",
"reward": 3.7057761098887467,
"creation_time": 1685185509.0218482,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599960.pt"
]
},
{
"steps": 1799945,
"file_path": "results/Huggy/Huggy/Huggy-1799945.onnx",
"reward": 3.4867722726449735,
"creation_time": 1685185751.8630035,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799945.pt"
]
},
{
"steps": 1999558,
"file_path": "results/Huggy/Huggy/Huggy-1999558.onnx",
"reward": 4.506132315366696,
"creation_time": 1685185996.708154,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999558.pt"
]
},
{
"steps": 2000308,
"file_path": "results/Huggy/Huggy/Huggy-2000308.onnx",
"reward": 4.3280609250068665,
"creation_time": 1685185996.9474435,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000308.pt"
]
}
],
"final_checkpoint": {
"steps": 2000308,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.3280609250068665,
"creation_time": 1685185996.9474435,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000308.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}