{ "Huggy": { "checkpoints": [ { "steps": 499995, "file_path": "results/Huggy/Huggy/Huggy-499995.onnx", "reward": 3.5635685781637827, "creation_time": 1670963341.8758214, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-499995.pt" ] }, { "steps": 999469, "file_path": "results/Huggy/Huggy/Huggy-999469.onnx", "reward": 4.170792298070316, "creation_time": 1670963899.280892, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-999469.pt" ] }, { "steps": 1499969, "file_path": "results/Huggy/Huggy/Huggy-1499969.onnx", "reward": 3.844013303930309, "creation_time": 1670964455.1176922, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1499969.pt" ] }, { "steps": 1999255, "file_path": "results/Huggy/Huggy/Huggy-1999255.onnx", "reward": 4.08536776133946, "creation_time": 1670965017.2670429, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1999255.pt" ] }, { "steps": 2000005, "file_path": "results/Huggy/Huggy/Huggy-2000005.onnx", "reward": 3.8534022834565906, "creation_time": 1670965017.415554, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-2000005.pt" ] } ], "final_checkpoint": { "steps": 2000005, "file_path": "results/Huggy/Huggy.onnx", "reward": 3.8534022834565906, "creation_time": 1670965017.415554, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-2000005.pt" ] } }, "metadata": { "stats_format_version": "0.3.0", "mlagents_version": "0.29.0.dev0", "torch_version": "1.8.1+cu102" } }