{ "Huggy": { "checkpoints": [ { "steps": 199981, "file_path": "results/Huggy/Huggy/Huggy-199981.onnx", "reward": 3.0337611364595816, "creation_time": 1671116823.87191, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-199981.pt" ] }, { "steps": 399964, "file_path": "results/Huggy/Huggy/Huggy-399964.onnx", "reward": 3.6102802316347757, "creation_time": 1671117046.2267764, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-399964.pt" ] }, { "steps": 599991, "file_path": "results/Huggy/Huggy/Huggy-599991.onnx", "reward": 4.187929255621774, "creation_time": 1671117269.347012, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-599991.pt" ] }, { "steps": 799911, "file_path": "results/Huggy/Huggy/Huggy-799911.onnx", "reward": 3.968155728363843, "creation_time": 1671117488.353527, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-799911.pt" ] }, { "steps": 999989, "file_path": "results/Huggy/Huggy/Huggy-999989.onnx", "reward": 4.0198675096035, "creation_time": 1671117713.260704, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-999989.pt" ] }, { "steps": 1199975, "file_path": "results/Huggy/Huggy/Huggy-1199975.onnx", "reward": 3.97700306822042, "creation_time": 1671117938.888522, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1199975.pt" ] }, { "steps": 1399956, "file_path": "results/Huggy/Huggy/Huggy-1399956.onnx", "reward": 3.822022313487773, "creation_time": 1671118159.9621978, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1399956.pt" ] }, { "steps": 1599936, "file_path": "results/Huggy/Huggy/Huggy-1599936.onnx", "reward": 3.5725148601965473, "creation_time": 1671118386.2077324, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1599936.pt" ] }, { "steps": 1799304, "file_path": "results/Huggy/Huggy/Huggy-1799304.onnx", "reward": 3.8949482922370615, "creation_time": 1671118611.964429, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1799304.pt" ] }, { "steps": 1999939, "file_path": "results/Huggy/Huggy/Huggy-1999939.onnx", "reward": 3.453361695011457, "creation_time": 1671118839.8844352, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-1999939.pt" ] }, { "steps": 2000060, "file_path": "results/Huggy/Huggy/Huggy-2000060.onnx", "reward": 3.541641182899475, "creation_time": 1671118840.010992, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-2000060.pt" ] } ], "final_checkpoint": { "steps": 2000060, "file_path": "results/Huggy/Huggy.onnx", "reward": 3.541641182899475, "creation_time": 1671118840.010992, "auxillary_file_paths": [ "results/Huggy/Huggy/Huggy-2000060.pt" ] } }, "metadata": { "stats_format_version": "0.3.0", "mlagents_version": "0.29.0.dev0", "torch_version": "1.8.1+cu102" } }