{ "Huggy": { "checkpoints": [ { "steps": 499709, "file_path": "results/Huggy/Huggy/Huggy-499709.onnx", "reward": 3.1997179133551463, "creation_time": 1670669010.0375414, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-499709.pt" ] }, { "steps": 999915, "file_path": "results/Huggy/Huggy/Huggy-999915.onnx", "reward": 3.511120386711963, "creation_time": 1670669619.897045, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-999915.pt" ] }, { "steps": 1499956, "file_path": "results/Huggy/Huggy/Huggy-1499956.onnx", "reward": 3.9312995395190278, "creation_time": 1670670235.9230425, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1499956.pt" ] }, { "steps": 1999964, "file_path": "results/Huggy/Huggy/Huggy-1999964.onnx", "reward": 3.6843088119502228, "creation_time": 1670670849.8215632, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1999964.pt" ] }, { "steps": 2000055, "file_path": "results/Huggy/Huggy/Huggy-2000055.onnx", "reward": 3.688574822762838, "creation_time": 1670670850.0294108, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-2000055.pt" ] } ], "final_checkpoint": { "steps": 2000055, "file_path": "results/Huggy/Huggy.onnx", "reward": 3.688574822762838, "creation_time": 1670670850.0294108, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-2000055.pt" ] } }, "metadata": { "stats_format_version": "0.3.0", "mlagents_version": "0.29.0.dev0", "torch_version": "1.8.1+cu102" } }