ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199970,
                "file_path": "results/Huggy2/Huggy/Huggy-199970.onnx",
                "reward": 3.0417364025488496,
                "creation_time": 1730209421.1276417,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-199970.pt"
                ]
            },
            {
                "steps": 399991,
                "file_path": "results/Huggy2/Huggy/Huggy-399991.onnx",
                "reward": 3.5976463719721763,
                "creation_time": 1730209662.0296466,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-399991.pt"
                ]
            },
            {
                "steps": 599939,
                "file_path": "results/Huggy2/Huggy/Huggy-599939.onnx",
                "reward": 3.874199861829931,
                "creation_time": 1730209917.7986612,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-599939.pt"
                ]
            },
            {
                "steps": 799907,
                "file_path": "results/Huggy2/Huggy/Huggy-799907.onnx",
                "reward": 3.941972533659059,
                "creation_time": 1730210169.190412,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-799907.pt"
                ]
            },
            {
                "steps": 999937,
                "file_path": "results/Huggy2/Huggy/Huggy-999937.onnx",
                "reward": 3.9477483313400428,
                "creation_time": 1730210423.5553145,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-999937.pt"
                ]
            },
            {
                "steps": 1199956,
                "file_path": "results/Huggy2/Huggy/Huggy-1199956.onnx",
                "reward": 3.9088793202888135,
                "creation_time": 1730210678.7052832,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1199956.pt"
                ]
            },
            {
                "steps": 1399934,
                "file_path": "results/Huggy2/Huggy/Huggy-1399934.onnx",
                "reward": 3.647069420133318,
                "creation_time": 1730210932.6573224,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1399934.pt"
                ]
            },
            {
                "steps": 1599970,
                "file_path": "results/Huggy2/Huggy/Huggy-1599970.onnx",
                "reward": 3.8004663773437044,
                "creation_time": 1730211184.4554963,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1599970.pt"
                ]
            },
            {
                "steps": 1799972,
                "file_path": "results/Huggy2/Huggy/Huggy-1799972.onnx",
                "reward": 3.704621474515824,
                "creation_time": 1730211436.5148387,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1799972.pt"
                ]
            },
            {
                "steps": 1999956,
                "file_path": "results/Huggy2/Huggy/Huggy-1999956.onnx",
                "reward": 3.6539258054784827,
                "creation_time": 1730211693.4651475,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-1999956.pt"
                ]
            },
            {
                "steps": 2000011,
                "file_path": "results/Huggy2/Huggy/Huggy-2000011.onnx",
                "reward": 3.664058999011391,
                "creation_time": 1730211693.5771127,
                "auxillary_file_paths": [
                    "results/Huggy2/Huggy/Huggy-2000011.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000011,
            "file_path": "results/Huggy2/Huggy.onnx",
            "reward": 3.664058999011391,
            "creation_time": 1730211693.5771127,
            "auxillary_file_paths": [
                "results/Huggy2/Huggy/Huggy-2000011.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.2.0.dev0",
        "torch_version": "2.5.0+cu121"
    }
}
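
For context, a minimal sketch (not part of this repository) of how the checkpoint history above could be read back with only the Python standard library; the relative path run_logs/training_status.json is an assumption based on where this file sits in the repo.

# Sketch only: print the reward recorded at each exported checkpoint.
import json

with open("run_logs/training_status.json") as f:  # assumed location of this file
    status = json.load(f)

# Each entry under "Huggy" -> "checkpoints" records the step count, the exported
# .onnx policy, the mean reward at export time, and the paired .pt file listed
# under the "auxillary_file_paths" key (spelling as written by ML-Agents).
for ckpt in status["Huggy"]["checkpoints"]:
    print(f'{ckpt["steps"]:>8}  reward={ckpt["reward"]:.3f}  {ckpt["file_path"]}')

final = status["Huggy"]["final_checkpoint"]
print("final:", final["steps"], final["file_path"])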