ppo-Huggy / run_logs / training_status.json
{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199755,
        "file_path": "results/Huggy/Huggy/Huggy-199755.onnx",
        "reward": 3.578132736878317,
        "creation_time": 1673107144.0449257,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199755.pt"
        ]
      },
      {
        "steps": 399959,
        "file_path": "results/Huggy/Huggy/Huggy-399959.onnx",
        "reward": 3.421715872628348,
        "creation_time": 1673107359.8861449,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399959.pt"
        ]
      },
      {
        "steps": 599902,
        "file_path": "results/Huggy/Huggy/Huggy-599902.onnx",
        "reward": 2.857063982221815,
        "creation_time": 1673107575.527178,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599902.pt"
        ]
      },
      {
        "steps": 799956,
        "file_path": "results/Huggy/Huggy/Huggy-799956.onnx",
        "reward": 3.991747788911642,
        "creation_time": 1673107790.2882807,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799956.pt"
        ]
      },
      {
        "steps": 999878,
        "file_path": "results/Huggy/Huggy/Huggy-999878.onnx",
        "reward": 3.6962568040688835,
        "creation_time": 1673108010.2651775,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999878.pt"
        ]
      },
      {
        "steps": 1199988,
        "file_path": "results/Huggy/Huggy/Huggy-1199988.onnx",
        "reward": 3.8847598549963416,
        "creation_time": 1673108230.5261915,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199988.pt"
        ]
      },
      {
        "steps": 1399978,
        "file_path": "results/Huggy/Huggy/Huggy-1399978.onnx",
        "reward": 4.761701451407538,
        "creation_time": 1673108450.3415923,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399978.pt"
        ]
      },
      {
        "steps": 1599936,
        "file_path": "results/Huggy/Huggy/Huggy-1599936.onnx",
        "reward": 3.8391015640812216,
        "creation_time": 1673108668.132289,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599936.pt"
        ]
      },
      {
        "steps": 1799964,
        "file_path": "results/Huggy/Huggy/Huggy-1799964.onnx",
        "reward": 3.566401197360112,
        "creation_time": 1673108888.0919943,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799964.pt"
        ]
      },
      {
        "steps": 1999376,
        "file_path": "results/Huggy/Huggy/Huggy-1999376.onnx",
        "reward": 3.343907952308655,
        "creation_time": 1673109109.5150707,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999376.pt"
        ]
      },
      {
        "steps": 2000126,
        "file_path": "results/Huggy/Huggy/Huggy-2000126.onnx",
        "reward": 2.958982633219825,
        "creation_time": 1673109109.6643085,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000126.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000126,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 2.958982633219825,
      "creation_time": 1673109109.6643085,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000126.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "0.29.0.dev0",
    "torch_version": "1.8.1+cu102"
  }
}
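For reference, a minimal Python sketch (not part of this file) of how the status JSON above can be consumed with only the standard library: it loads the file, lists the periodic checkpoints under the "Huggy" behavior key, and picks the one with the highest logged mean reward. Note from the data itself that the best-scoring checkpoint (Huggy-1399978.onnx, reward ~4.76) is not the same as the final export. The file location is assumed to follow the layout in the title (run_logs/training_status.json); adjust the path for your run.

```python
import json
from pathlib import Path

# Assumed location, matching the ML-Agents run layout shown in the title.
status_path = Path("run_logs/training_status.json")
status = json.loads(status_path.read_text())

# "Huggy" is the behavior name used throughout this file.
checkpoints = status["Huggy"]["checkpoints"]

# Highest mean reward among the periodic checkpoints.
best = max(checkpoints, key=lambda c: c["reward"])
print(f"best checkpoint: {best['file_path']} "
      f"(steps={best['steps']}, reward={best['reward']:.3f})")

# The final export written at the end of training.
final = status["Huggy"]["final_checkpoint"]
print(f"final export:    {final['file_path']} "
      f"(steps={final['steps']}, reward={final['reward']:.3f})")
```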