{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199806,
        "file_path": "results/Huggy/Huggy/Huggy-199806.onnx",
        "reward": 3.590755268417556,
        "creation_time": 1686587000.8079193,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199806.pt"
        ]
      },
      {
        "steps": 399837,
        "file_path": "results/Huggy/Huggy/Huggy-399837.onnx",
        "reward": 3.4406433443228406,
        "creation_time": 1686587239.8505647,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399837.pt"
        ]
      },
      {
        "steps": 599848,
        "file_path": "results/Huggy/Huggy/Huggy-599848.onnx",
        "reward": 2.655450160686786,
        "creation_time": 1686587480.9827173,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599848.pt"
        ]
      },
      {
        "steps": 799921,
        "file_path": "results/Huggy/Huggy/Huggy-799921.onnx",
        "reward": 3.658480999774711,
        "creation_time": 1686587735.951263,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799921.pt"
        ]
      },
      {
        "steps": 999923,
        "file_path": "results/Huggy/Huggy/Huggy-999923.onnx",
        "reward": 4.110999794501178,
        "creation_time": 1686587972.6178222,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999923.pt"
        ]
      },
      {
        "steps": 1199956,
        "file_path": "results/Huggy/Huggy/Huggy-1199956.onnx",
        "reward": 4.59771894192209,
        "creation_time": 1686588205.6302478,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199956.pt"
        ]
      },
      {
        "steps": 1399990,
        "file_path": "results/Huggy/Huggy/Huggy-1399990.onnx",
        "reward": 3.8094970359059865,
        "creation_time": 1686588437.3425572,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399990.pt"
        ]
      },
      {
        "steps": 1599976,
        "file_path": "results/Huggy/Huggy/Huggy-1599976.onnx",
        "reward": 3.320423329776188,
        "creation_time": 1686588667.0785537,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599976.pt"
        ]
      },
      {
        "steps": 1799918,
        "file_path": "results/Huggy/Huggy/Huggy-1799918.onnx",
        "reward": 3.6590964290830823,
        "creation_time": 1686588902.4035113,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799918.pt"
        ]
      },
      {
        "steps": 1999993,
        "file_path": "results/Huggy/Huggy/Huggy-1999993.onnx",
        "reward": 3.488340191219164,
        "creation_time": 1686589138.1407545,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999993.pt"
        ]
      },
      {
        "steps": 2000121,
        "file_path": "results/Huggy/Huggy/Huggy-2000121.onnx",
        "reward": 3.493296994103326,
        "creation_time": 1686589138.3219082,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000121.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000121,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 3.493296994103326,
      "creation_time": 1686589138.3219082,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000121.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "0.31.0.dev0",
    "torch_version": "1.11.0+cu102"
  }
}