{
"Huggy": {
"checkpoints": [
{
"steps": 199960,
"file_path": "results/Huggy2/Huggy/Huggy-199960.onnx",
"reward": 3.616135911665101,
"creation_time": 1729943417.9907699,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199960.pt"
]
},
{
"steps": 399982,
"file_path": "results/Huggy2/Huggy/Huggy-399982.onnx",
"reward": 3.7514229803890378,
"creation_time": 1729943665.176279,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399982.pt"
]
},
{
"steps": 599948,
"file_path": "results/Huggy2/Huggy/Huggy-599948.onnx",
"reward": 4.115144457135882,
"creation_time": 1729943917.6011658,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599948.pt"
]
},
{
"steps": 799964,
"file_path": "results/Huggy2/Huggy/Huggy-799964.onnx",
"reward": 3.900278953801502,
"creation_time": 1729944166.7862926,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799964.pt"
]
},
{
"steps": 999949,
"file_path": "results/Huggy2/Huggy/Huggy-999949.onnx",
"reward": 3.7812681959110237,
"creation_time": 1729944420.9129472,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999949.pt"
]
},
{
"steps": 1199993,
"file_path": "results/Huggy2/Huggy/Huggy-1199993.onnx",
"reward": 3.980447501528497,
"creation_time": 1729944679.5330446,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199993.pt"
]
},
{
"steps": 1399938,
"file_path": "results/Huggy2/Huggy/Huggy-1399938.onnx",
"reward": 4.23830445189225,
"creation_time": 1729944937.9314568,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399938.pt"
]
},
{
"steps": 1599953,
"file_path": "results/Huggy2/Huggy/Huggy-1599953.onnx",
"reward": 3.916651943423709,
"creation_time": 1729945194.733353,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599953.pt"
]
},
{
"steps": 1799926,
"file_path": "results/Huggy2/Huggy/Huggy-1799926.onnx",
"reward": 3.4948786976081982,
"creation_time": 1729945451.4970279,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799926.pt"
]
},
{
"steps": 1999959,
"file_path": "results/Huggy2/Huggy/Huggy-1999959.onnx",
"reward": 4.074446930680224,
"creation_time": 1729945707.6926584,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999959.pt"
]
},
{
"steps": 2000049,
"file_path": "results/Huggy2/Huggy/Huggy-2000049.onnx",
"reward": 4.093718702488757,
"creation_time": 1729945707.810991,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000049.pt"
]
}
],
"final_checkpoint": {
"steps": 2000049,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.093718702488757,
"creation_time": 1729945707.810991,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000049.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.5.0+cu121"
}
}