{ "Huggy": { "checkpoints": [ { "steps": 499996, "file_path": "results/Huggy/Huggy/Huggy-499996.onnx", "reward": 3.671555533295586, "creation_time": 1670381396.218783, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-499996.pt" ] }, { "steps": 999983, "file_path": "results/Huggy/Huggy/Huggy-999983.onnx", "reward": 3.9387628414281983, "creation_time": 1670381940.1520593, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-999983.pt" ] }, { "steps": 1499968, "file_path": "results/Huggy/Huggy/Huggy-1499968.onnx", "reward": 3.8858162410242456, "creation_time": 1670382486.7656188, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1499968.pt" ] }, { "steps": 1999990, "file_path": "results/Huggy/Huggy/Huggy-1999990.onnx", "reward": 4.059384526038657, "creation_time": 1670383047.2882288, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1999990.pt" ] }, { "steps": 2000015, "file_path": "results/Huggy/Huggy/Huggy-2000015.onnx", "reward": 4.032740612222691, "creation_time": 1670383047.404701, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-2000015.pt" ] } ], "final_checkpoint": { "steps": 2000015, "file_path": "results/Huggy/Huggy.onnx", "reward": 4.032740612222691, "creation_time": 1670383047.404701, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-2000015.pt" ] } }, "metadata": { "stats_format_version": "0.3.0", "mlagents_version": "0.29.0.dev0", "torch_version": "1.8.1+cu102" } }