{
"Huggy": {
"checkpoints": [
{
"steps": 199826,
"file_path": "results/Huggy2/Huggy/Huggy-199826.onnx",
"reward": 3.1630417276436176,
"creation_time": 1735633772.9049478,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199826.pt"
]
},
{
"steps": 399891,
"file_path": "results/Huggy2/Huggy/Huggy-399891.onnx",
"reward": 3.8440069883100447,
"creation_time": 1735634033.40958,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399891.pt"
]
},
{
"steps": 599959,
"file_path": "results/Huggy2/Huggy/Huggy-599959.onnx",
"reward": 4.090990190322582,
"creation_time": 1735634300.219198,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599959.pt"
]
},
{
"steps": 799933,
"file_path": "results/Huggy2/Huggy/Huggy-799933.onnx",
"reward": 3.9430204409360887,
"creation_time": 1735634565.9956436,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799933.pt"
]
},
{
"steps": 999981,
"file_path": "results/Huggy2/Huggy/Huggy-999981.onnx",
"reward": 3.7687451472649207,
"creation_time": 1735634837.4547682,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999981.pt"
]
},
{
"steps": 1199948,
"file_path": "results/Huggy2/Huggy/Huggy-1199948.onnx",
"reward": 3.6722870141852137,
"creation_time": 1735635107.883385,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199948.pt"
]
},
{
"steps": 1399977,
"file_path": "results/Huggy2/Huggy/Huggy-1399977.onnx",
"reward": 4.140916992317546,
"creation_time": 1735635377.5598361,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399977.pt"
]
},
{
"steps": 1599885,
"file_path": "results/Huggy2/Huggy/Huggy-1599885.onnx",
"reward": 3.9502513463370468,
"creation_time": 1735635654.8874202,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599885.pt"
]
},
{
"steps": 1799929,
"file_path": "results/Huggy2/Huggy/Huggy-1799929.onnx",
"reward": 3.770589975019296,
"creation_time": 1735635945.6851666,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799929.pt"
]
},
{
"steps": 1999960,
"file_path": "results/Huggy2/Huggy/Huggy-1999960.onnx",
"reward": 3.8854986126460727,
"creation_time": 1735636237.9976597,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999960.pt"
]
},
{
"steps": 2000053,
"file_path": "results/Huggy2/Huggy/Huggy-2000053.onnx",
"reward": 3.861960919573903,
"creation_time": 1735636238.1217277,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000053.pt"
]
}
],
"final_checkpoint": {
"steps": 2000053,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.861960919573903,
"creation_time": 1735636238.1217277,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000053.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.5.1+cu121"
}
}