ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199846,
                "file_path": "results/Huggy/Huggy/Huggy-199846.onnx",
                "reward": 3.7491436214358718,
                "creation_time": 1704100145.2776513,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199846.pt"
                ]
            },
            {
                "steps": 399903,
                "file_path": "results/Huggy/Huggy/Huggy-399903.onnx",
                "reward": 3.826616571795556,
                "creation_time": 1704100389.2363365,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399903.pt"
                ]
            },
            {
                "steps": 599852,
                "file_path": "results/Huggy/Huggy/Huggy-599852.onnx",
                "reward": 3.615945642644709,
                "creation_time": 1704100641.9451275,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599852.pt"
                ]
            },
            {
                "steps": 799999,
                "file_path": "results/Huggy/Huggy/Huggy-799999.onnx",
                "reward": 3.864833574785906,
                "creation_time": 1704100894.896189,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799999.pt"
                ]
            },
            {
                "steps": 999909,
                "file_path": "results/Huggy/Huggy/Huggy-999909.onnx",
                "reward": 3.897565647508159,
                "creation_time": 1704101159.879614,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999909.pt"
                ]
            },
            {
                "steps": 1199977,
                "file_path": "results/Huggy/Huggy/Huggy-1199977.onnx",
                "reward": 4.18917977809906,
                "creation_time": 1704101426.030096,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199977.pt"
                ]
            },
            {
                "steps": 1399965,
                "file_path": "results/Huggy/Huggy/Huggy-1399965.onnx",
                "reward": 4.937623858451843,
                "creation_time": 1704101688.77882,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399965.pt"
                ]
            },
            {
                "steps": 1599944,
                "file_path": "results/Huggy/Huggy/Huggy-1599944.onnx",
                "reward": 3.915216395388479,
                "creation_time": 1704101940.530414,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599944.pt"
                ]
            },
            {
                "steps": 1799938,
                "file_path": "results/Huggy/Huggy/Huggy-1799938.onnx",
                "reward": 3.947517048685174,
                "creation_time": 1704102189.4856308,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799938.pt"
                ]
            },
            {
                "steps": 1999987,
                "file_path": "results/Huggy/Huggy/Huggy-1999987.onnx",
                "reward": 3.681383934285906,
                "creation_time": 1704102436.341767,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999987.pt"
                ]
            },
            {
                "steps": 2000110,
                "file_path": "results/Huggy/Huggy/Huggy-2000110.onnx",
                "reward": 3.747194856405258,
                "creation_time": 1704102436.4524102,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000110.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000110,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.747194856405258,
            "creation_time": 1704102436.4524102,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000110.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.1.0.dev0",
        "torch_version": "2.1.2+cu121"
    }
}
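
This is the training_status.json that ML-Agents writes while training the "Huggy" behavior with PPO: one entry per checkpoint (step count, mean episode reward, exported .onnx policy, and an auxiliary .pt state file), plus a final_checkpoint for the policy exported at the end of the run. The snippet below is a minimal sketch for inspecting the reward trajectory; the keys ("Huggy", "checkpoints", "final_checkpoint", "steps", "reward", "file_path") come straight from the file above, while the relative path run_logs/training_status.json is an assumption based on this repo's layout.

```python
import json

# Assumption: the file sits at run_logs/training_status.json, as in this repo.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

behavior = status["Huggy"]  # the top-level key is the behavior name

# Each checkpoint records step count, mean episode reward, and the exported .onnx path.
for ckpt in behavior["checkpoints"]:
    print(f"{ckpt['steps']:>9,} steps  reward {ckpt['reward']:.3f}  {ckpt['file_path']}")

final = behavior["final_checkpoint"]
print(f"final: {final['steps']:,} steps, reward {final['reward']:.3f} -> {final['file_path']}")
```

Read this way, the mean reward climbs from about 3.75 at the first ~200k-step checkpoint to a peak of about 4.94 at ~1.4M steps, then settles around 3.75 at the final checkpoint (2,000,110 steps), which is the policy exported as results/Huggy/Huggy.onnx.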