{}
{
"Huggy": {
"checkpoints": [
{
"steps": 199968,
"file_path": "results/Huggy/Huggy/Huggy-199968.onnx",
"reward": 3.5098996594034393,
"creation_time": 1691011349.2235792,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199968.pt"
]
},
{
"steps": 399944,
"file_path": "results/Huggy/Huggy/Huggy-399944.onnx",
"reward": 4.244269669055939,
"creation_time": 1691011585.8875656,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399944.pt"
]
},
{
"steps": 599910,
"file_path": "results/Huggy/Huggy/Huggy-599910.onnx",
"reward": 3.4142166674137115,
"creation_time": 1691011826.6579437,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599910.pt"
]
},
{
"steps": 799583,
"file_path": "results/Huggy/Huggy/Huggy-799583.onnx",
"reward": 3.9043923593774625,
"creation_time": 1691012065.7458348,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799583.pt"
]
},
{
"steps": 999944,
"file_path": "results/Huggy/Huggy/Huggy-999944.onnx",
"reward": 4.026764316805478,
"creation_time": 1691012313.0049007,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999944.pt"
]
},
{
"steps": 1199983,
"file_path": "results/Huggy/Huggy/Huggy-1199983.onnx",
"reward": 3.7248878141619124,
"creation_time": 1691012555.783961,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199983.pt"
]
},
{
"steps": 1399932,
"file_path": "results/Huggy/Huggy/Huggy-1399932.onnx",
"reward": 3.56764061178141,
"creation_time": 1691012802.3262887,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399932.pt"
]
},
{
"steps": 1599998,
"file_path": "results/Huggy/Huggy/Huggy-1599998.onnx",
"reward": 3.878545314657922,
"creation_time": 1691013050.9549494,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599998.pt"
]
},
{
"steps": 1799999,
"file_path": "results/Huggy/Huggy/Huggy-1799999.onnx",
"reward": 3.4767394250547383,
"creation_time": 1691013299.7216415,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799999.pt"
]
},
{
"steps": 1999983,
"file_path": "results/Huggy/Huggy/Huggy-1999983.onnx",
"reward": 4.049030184745789,
"creation_time": 1691013549.8775253,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999983.pt"
]
},
{
"steps": 2000042,
"file_path": "results/Huggy/Huggy/Huggy-2000042.onnx",
"reward": 4.018647125789097,
"creation_time": 1691013550.00118,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000042.pt"
]
}
],
"final_checkpoint": {
"steps": 2000042,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.018647125789097,
"creation_time": 1691013550.00118,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000042.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}