Upload PPO LunarLander-v2 trained agent

Files changed:
- README.md +1 -1
- config.json +1 -1
- ppo-LunarLander-v2.zip +2 -2
- ppo-LunarLander-v2/data +19 -19
- ppo-LunarLander-v2/policy.optimizer.pth +1 -1
- ppo-LunarLander-v2/policy.pth +1 -1
- replay.mp4 +0 -0
- results.json +1 -1
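The file set in this commit (the SB3 save archive, its unpacked data/policy files, replay.mp4, results.json, and the model-card metric) matches what the huggingface_sb3 packaging helper writes in a single push. A minimal sketch of how such an upload is typically produced, assuming that workflow; the repo id is a placeholder and the hyperparameters are the ones recorded in config.json below:

```python
import gymnasium as gym
from huggingface_sb3 import package_to_hub
from stable_baselines3 import PPO
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.vec_env import DummyVecEnv

# Hyperparameters as recorded in config.json of this commit.
model = PPO(
    "MlpPolicy",
    make_vec_env("LunarLander-v2", n_envs=1),
    n_steps=1024,
    batch_size=64,
    n_epochs=4,
    gamma=0.999,
    gae_lambda=0.98,
    ent_coef=0.01,
    learning_rate=3e-4,
    verbose=1,
)
model.learn(total_timesteps=1_500_000)  # "_total_timesteps" in the updated config

# package_to_hub evaluates the agent, renders replay.mp4, writes results.json
# and the model card, and pushes everything to the Hub as one commit.
package_to_hub(
    model=model,
    model_name="ppo-LunarLander-v2",
    model_architecture="PPO",
    env_id="LunarLander-v2",
    eval_env=DummyVecEnv([lambda: gym.make("LunarLander-v2", render_mode="rgb_array")]),
    repo_id="<user>/ppo-LunarLander-v2",  # placeholder; the actual namespace is not shown here
    commit_message="Upload PPO LunarLander-v2 trained agent",
)
```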
README.md
CHANGED
@@ -16,7 +16,7 @@ model-index:
      type: LunarLander-v2
    metrics:
    - type: mean_reward
-     value:
+     value: 276.18 +/- 24.66
      name: mean_reward
      verified: false
---
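The new metric string appears to be the evaluation result stored in results.json (further down in this commit), rounded to two decimals; assuming that convention:

```python
mean_reward, std_reward = 276.1839692, 24.663124630698924  # values from results.json
print(f"{mean_reward:.2f} +/- {std_reward:.2f}")  # "276.18 +/- 24.66", the value written to README.md
```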
config.json
CHANGED
@@ -1 +1 @@
-
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==", "__module__": "stable_baselines3.common.policies", "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function ActorCriticPolicy.__init__ at 0x7c3a228ba8c0>", "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7c3a228ba950>", "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7c3a228ba9e0>", "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7c3a228baa70>", "_build": "<function ActorCriticPolicy._build at 0x7c3a228bab00>", "forward": "<function ActorCriticPolicy.forward at 0x7c3a228bab90>", "extract_features": "<function ActorCriticPolicy.extract_features at 0x7c3a228bac20>", "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7c3a228bacb0>", "_predict": "<function ActorCriticPolicy._predict at 0x7c3a228bad40>", "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7c3a228badd0>", "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7c3a228bae60>", "predict_values": "<function ActorCriticPolicy.predict_values at 0x7c3a228baef0>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7c3a22856dc0>"}, "verbose": 1, "policy_kwargs": {}, "num_timesteps": 1000448, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1730107716529201814, "learning_rate": 0.0003, "tensorboard_log": null, "_last_obs": null, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdAAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYBAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwGFlIwBQ5R0lFKULg=="}, "_last_original_obs": null, "_episode_num": 
0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": -0.00044800000000000395, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVGAwAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQHCAnYUWVNaMAWyUTQABjAF0lEdApdDLdvbXYnV9lChoBkdAbsX9H+ZPVWgHS+doCEdApdFsw8GLUHV9lChoBkdAcDoawljVhGgHTRMBaAhHQKXSJns9jgB1fZQoaAZHQHBsvLX+VC5oB0v1aAhHQKXTfxvvSc91fZQoaAZHQHLMa6reZXxoB00RAmgIR0Cl1VQaJhvzdX2UKGgGR0BuX1lZowmFaAdL/GgIR0Cl1kE1/DtPdX2UKGgGR0Bwz+C/XXiBaAdL/2gIR0Cl2AcxKxs3dX2UKGgGR0ByWh0zTF2naAdNHAFoCEdApdjsG1QZXXV9lChoBkdAPmU+cH4XXWgHS9FoCEdApdl7ujRD1HV9lChoBkdAcg9JLM9r42gHTQEBaAhHQKXaLmfXf651fZQoaAZHQG5j19F4LThoB0v1aAhHQKXbb0fYBeZ1fZQoaAZHQHCvKuOjqOdoB0v/aAhHQKXcHXhfjS51fZQoaAZHQHMKVRDTjNpoB00aAWgIR0Cl3N2RigCfdX2UKGgGR0Bsio/qxC6ZaAdNEgFoCEdApd2fRZ2ZA3V9lChoBkdAcdkg1m8M/mgHTSYBaAhHQKXe+qBmPHV1fZQoaAZHQHCM6TSsr/doB00nAWgIR0Cl38j1XeWOdX2UKGgGR0BxWaKsMiKSaAdNEwFoCEdApeCDpeNT+HV9lChoBkdAbkhm/WUbDWgHS/toCEdApeE0dT5wfnV9lChoBkdAciUa6jFhomgHS+poCEdApeJnQBxPwnV9lChoBkdATrXuXu3MIWgHS9doCEdApeL5gZ0jknV9lChoBkdAcpmBOYYzi2gHTRoBaAhHQKXjwNy5qdp1fZQoaAZHQHA2pbpu/DdoB0v/aAhHQKXkbPM0P6N1fZQoaAZHQHLPdcOby6NoB01IAWgIR0Cl5euM+/xldX2UKGgGR0BuM/wAlv61aAdL/WgIR0Cl5pkcjqwAdX2UKGgGR0BynBQaaTfSaAdL3WgIR0Cl5zCUX531dX2UKGgGR0Bx7e4iHIp6aAdNJQFoCEdApef/kRzzVnV9lChoBkdAcEtM8YAKfGgHTRQBaAhHQKXpSuFHrhR1fZQoaAZHQHAr9PUKArhoB0vyaAhHQKXp9fXwsoV1fZQoaAZHQG8RKYZ2pyZoB00TAWgIR0Cl6q0Od5IIdX2UKGgGR0Bu3kzdk8RuaAdNBAFoCEdApethwyZa3nV9lChoBkdAcVVze40/GGgHS9poCEdApeyiItUXHnV9lChoBkdAcem2nbZezGgHTQwBaAhHQKXtqDvmYBx1fZQoaAZHQHBGH9ehPCVoB00UAWgIR0Cl7pTf779AdX2UKGgGR8A35LuhK15TaAdLoWgIR0Cl7ylAE+xGdX2UKGgGR0BzwE5lvqC6aAdNIAFoCEdApfEb5IpYtHV9lChoBkdAcgi2Kl54W2gHTS8BaAhHQKXySHoHLRt1fZQoaAZHQHI0XCwbEP1oB00bAWgIR0Cl8wu938oAdX2UKGgGR0BxzjgR9PUKaAdL9WgIR0Cl87cd5prUdX2UKGgGR0Bzx/KISDh+aAdL9WgIR0Cl9PHK4hECdX2UKGgGR0BoOJ4IKMNuaAdN6ANoCEdApfhJXGOuJXV9lChoBkdAcVsZLIxQBWgHS+BoCEdApfjlzQu27XV9lChoBkdAcIgivgWJrWgHTW0BaAhHQKX546jnFHd1fZQoaAZHQHDtArxy4nZoB00SAWgIR0Cl+z5j6N2ldX2UKGgGR0Bw4tS/CZWraAdL/mgIR0Cl+/EEs8PndX2UKGgGR0BwEmw8nuzAaAdL32gIR0Cl/I1BD5TIdX2UKGgGR0BxVfqiXY16aAdNeAFoCEdApf2VWEK3NXV9lChoBkdAcWyhwEQoTmgHTQUBaAhHQKX+4TGHYYl1fZQoaAZHQHEG9BnjABVoB00HAWgIR0Cl/5biZOSGdX2UKGgGR0ByGUc7yQPqaAdL6mgIR0CmADk8A7xNdX2UKGgGR0BxpixLTQVsaAdL62gIR0CmAN6wD/2kdX2UKGgGR0BOf32M85jpaAdL0mgIR0CmAgOJDVpcdX2UKGgGR0BxlfLPldTpaAdL+2gIR0CmArHKfWc0dX2UKGgGR0BxAiY5T6zmaAdNAQFoCEdApgNsFY+0PnV9lChoBkdAcUinB+F10WgHS/9oCEdApgQdKZlWfnV9lChoBkdAcQKARChN/WgHTSoBaAhHQKYFf2wmmch1fZQoaAZHQHG4+3UhFE1oB0v0aAhHQKYGJB1LamJ1fZQoaAZHQHD/pimVJMBoB00SAWgIR0CmBxpbt7a7dX2UKGgGR0BwU772tdRjaAdL5WgIR0CmB+DIaLn+dX2UKGgGR0BufDWmP5pKaAdNGQFoCEdApgmoHmig03V9lChoBkdASq2tITXarWgHS9loCEdApgp/+l0o0HV9lChoBkdAc0Blj3Ehq2gHS+hoCEdApgtaL61stXV9lChoBkdAbeCnrpqynmgHTQUBaAhHQKYMTrTH80l1fZQoaAZHQGXqm3F1jiJoB03oA2gIR0CmD6lme18cdX2UKGgGR0BxSkWrOqvNaAdNEwFoCEdAphBhrJr+HnV9lChoBkdAcSDewLVnVWgHTS0BaAhHQKYRw32mHgx1fZQoaAZHQHITrf1pTMtoB003AWgIR0CmEpZwXIludX2UKGgGR0BRGE/nnuAqaAdLrmgIR0CmEw/R/mT1dX2UKGgGR0BwqoZl4C6paAdL9mgIR0CmFFTYmLLqdX2UKGgGR0ByMPci4axYaAdNPgFoCEdAphU5LkCFK3V9lChoBkdAb5A5avA442gHTQ8BaAhHQKYV9Q3PzFx1fZQoaAZHQHDMk0m+j/NoB00cAWgIR0CmFrKt5le4dX2UKGgGR0Bt7YY+B6KMaAdL/GgIR0CmF/Wwu/UOdX2UKGgGR0BsILsUqQRxaAdNEwFoCEdAphiyisXBQHV9lChoBkdATTkLlV94NmgHS9JoCEdAphlJV0cOsnV9lChoBkdAcR8LwWnCO2gHTSoBaAhHQKYaGJZ4fOl1fZQoaAZHQFHPLDAJswdoB0vQaAhHQKYbQflIVdp1fZQoaAZHQHEmW6shgVpoB0vraAhHQKYb6VAzHjp1fZQoaAZHQE/7XV9Wp61oB0vaaAhHQKYcgZx7zCl1fZQoaAZHQHB5gLqlgtxoB00JAWgIR0CmHT4lyBCldX2UKGgGR0Bt7Db8FY+0aAdL9WgIR0CmHnaqjrRjdX2UKGgGR0BGuuFHrhR7aAdL2mgIR0C
mHwyUC7sfdX2UKGgGR0BxFIYYR/ViaAdNHAFoCEdAph/PqTr3TXV9lChoBkdAbn+Qd0aIe2gHTQYBaAhHQKYgoXqJMxp1fZQoaAZHQHF91jmSyMVoB00kAWgIR0CmImt03fhudX2UKGgGR0Bui2ICU5dXaAdNLgFoCEdApiOHVTaTOnV9lChoBkdAcO76NEPUa2gHS/hoCEdApiR9TBInSnV9lChoBkdAUVrE1l5GBmgHTegDaAhHQKYoFPN3W4F1fZQoaAZHQHD2EAT7EYRoB00lAWgIR0CmKNycTakAdX2UKGgGR0BBPu2JBPbgaAdLxGgIR0CmKfeoLofTdX2UKGgGR0BsU0BKcurZaAdL+2gIR0CmKqGYa5wwdX2UKGgGR0Byl6QGOdXlaAdNAAFoCEdApitSasp5NXV9lChoBkdAcRgT4tYjjmgHTQUBaAhHQKYsCloDgZV1fZQoaAZHQGwRu/Dcdo5oB00HAWgIR0CmLVZN47iidX2UKGgGR0BxmvzreIl/aAdNNwFoCEdApi4uweNkv3V9lChoBkdAQODYNAkcCGgHS9doCEdApi69SMtK7XV9lChoBkdAb4e+lj3Eh2gHTQABaAhHQKYvawM6RyR1fZQoaAZHQG6jPomois5oB00SAWgIR0CmML12aDwpdX2UKGgGR0BGd/uTibUgaAdL0GgIR0CmMUuAAhjfdX2UKGgGR0Bwikoc7yQQaAdNDAFoCEdApjIJY7q6fHV9lChoBkdAcbcU2DQJHGgHTTkBaAhHQKYy2d9Ujs51ZS4="}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 6204, "observation_space": {":type:": "<class 'gymnasium.spaces.box.Box'>", ":serialized:": "gAWVdgIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoCIwCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoESiWCAAAAAAAAAABAQEBAQEBAZRoFUsIhZRoGXSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBEoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaAtLCIWUaBl0lFKUjARoaWdolGgRKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgLSwiFlGgZdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=", "dtype": "float32", "bounded_below": "[ True True True True True True True True]", "bounded_above": "[ True True True True True True True True]", "_shape": [8], "low": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. ]", "low_repr": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high_repr": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. 
]", "_np_random": null}, "action_space": {":type:": "<class 'gymnasium.spaces.discrete.Discrete'>", ":serialized:": "gAWV/QAAAAAAAACMGWd5bW5hc2l1bS5zcGFjZXMuZGlzY3JldGWUjAhEaXNjcmV0ZZSTlCmBlH2UKIwBbpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCaTiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIBAAAAAAAAACUhpRSlIwFc3RhcnSUaAhoDkMIAAAAAAAAAACUhpRSlIwGX3NoYXBllCmMBWR0eXBllGgLjAJpOJSJiIeUUpQoSwNoD05OTkr/////Sv////9LAHSUYowKX25wX3JhbmRvbZROdWIu", "n": "4", "start": "0", "_shape": [], "dtype": "int64", "_np_random": null}, "n_envs": 1, "n_steps": 1024, "gamma": 0.999, "gae_lambda": 0.98, "ent_coef": 0.01, "vf_coef": 0.5, "max_grad_norm": 0.5, "batch_size": 64, "n_epochs": 4, "clip_range": {":type:": "<class 'function'>", ":serialized:": "gAWVrQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUaACMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFowEZnVuY5SMDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz/JmZmZmZmahZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "clip_range_vf": null, "normalize_advantage": true, "target_kl": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVrQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUaACMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFowEZnVuY5SMDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz8zqSowVTJhhZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "system_info": {"OS": "Linux-6.1.85+-x86_64-with-glibc2.35 # 1 SMP PREEMPT_DYNAMIC Thu Jun 27 21:05:47 UTC 2024", "Python": "3.10.12", "Stable-Baselines3": "2.0.0a5", "PyTorch": "2.5.0+cu121", "GPU Enabled": "True", "Numpy": "1.26.4", "Cloudpickle": "3.1.0", "Gymnasium": "0.28.1", "OpenAI Gym": "0.25.2"}}
+
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==", "__module__": "stable_baselines3.common.policies", "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function ActorCriticPolicy.__init__ at 0x7cf74a827d90>", "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7cf74a827e20>", "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7cf74a827eb0>", "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7cf74a827f40>", "_build": "<function ActorCriticPolicy._build at 0x7cf74a834040>", "forward": "<function ActorCriticPolicy.forward at 0x7cf74a8340d0>", "extract_features": "<function ActorCriticPolicy.extract_features at 0x7cf74a834160>", "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7cf74a8341f0>", "_predict": "<function ActorCriticPolicy._predict at 0x7cf74a834280>", "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7cf74a834310>", "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7cf74a8343a0>", "predict_values": "<function ActorCriticPolicy.predict_values at 0x7cf74a834430>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7cf6f492cc00>"}, "verbose": 1, "policy_kwargs": {}, "num_timesteps": 1500160, "_total_timesteps": 1500000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1730499814071290753, "learning_rate": 0.0003, "tensorboard_log": null, "_last_obs": null, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdAAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYBAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwGFlIwBQ5R0lFKULg=="}, "_last_original_obs": null, "_episode_num": 
0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": -0.00010666666666669933, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVAAwAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQHD2YpQUHpuMAWyUTQ4BjAF0lEdAo/mN6mfoR3V9lChoBkdAb2b7wazeGmgHS/VoCEdAo/p7UTcqOXV9lChoBkdAT2EaGYa5w2gHS7VoCEdAo/sselsP8XV9lChoBkdAShssasIVumgHS7poCEdAo/vsHObAlHV9lChoBkdAcTvdszl90GgHS/xoCEdAo/y036yjYnV9lChoBkdAcSwh9b5dnmgHTQ0BaAhHQKP+Aqaw2VF1fZQoaAZHQG/0Y1YQrc1oB0v5aAhHQKP+rv8ZUDN1fZQoaAZHQHGJpemelKtoB0vyaAhHQKP/UgJTl1d1fZQoaAZHQHF7gXMyJsRoB0voaAhHQKP/8Wu5jH51fZQoaAZHQHGbrtZ3cHpoB0vuaAhHQKQBLbzshPl1fZQoaAZHQHESHvc8DCBoB00kAWgIR0CkAfUSZjQRdX2UKGgGR0BxWrK2a2F4aAdL42gIR0CkApaMBIWhdX2UKGgGR0BwdFvrGBFvaAdL8mgIR0CkAzh2OhkBdX2UKGgGR0BwW5i/fwZwaAdNCgFoCEdApASE/0NBnnV9lChoBkdAcNTSV4X402gHS/doCEdApAUqLsKLKnV9lChoBkdAcgnmYjSofmgHTRsBaAhHQKQF6JY1YQt1fZQoaAZHQG920MG5c1RoB0v3aAhHQKQGlD5TIeZ1fZQoaAZHQHEOx+SbH6xoB00UAWgIR0CkB9s+V1OkdX2UKGgGR0Bx60NjLB9DaAdL/WgIR0CkCIuc2BJ7dX2UKGgGR0Bwd2u6mO2iaAdL7mgIR0CkCSxYq5LAdX2UKGgGR0BywMuOCGvfaAdNEAFoCEdApAni6jFhonV9lChoBkdAb8/Un5SFXmgHS/RoCEdApAsZXEIgNnV9lChoBkdAcBCP8AJb+2gHS/NoCEdApAu704BFNXV9lChoBkdAcRp+6Ae7tmgHTQcBaAhHQKQMcDPGACp1fZQoaAZHQEbfbi6xxDNoB0vEaAhHQKQM9EBKcut1fZQoaAZHQHE0lRLsa89oB0vkaAhHQKQOIErXlKd1fZQoaAZHQHBOO49X9zhoB00BAWgIR0CkDtabF0gbdX2UKGgGR0BvYLm0VrRCaAdL42gIR0CkD29mg8KYdX2UKGgGR0BwTiPtD2J0aAdNEAFoCEdApBAoP3BYWHV9lChoBkdAcl9FfReC1GgHS/9oCEdApBHGhdt2tHV9lChoBkdAcITrqt5lfGgHTQYBaAhHQKQSrPgNwzd1fZQoaAZHQG88suOCGvhoB0vsaAhHQKQTjnmq5sl1fZQoaAZHQGR59G7SRbNoB03oA2gIR0CkF1Gthd+odX2UKGgGR0A43eJ53TuwaAdLu2gIR0CkF9JxvNu+dX2UKGgGR0BwaRiNKh+OaAdL9GgIR0CkGHso2GZedX2UKGgGR0BwwLW07bL2aAdNEgFoCEdApBnL59E1EXV9lChoBkdAYb+163RXwWgHTegDaAhHQKQdF7HAAQx1fZQoaAZHQDp8lOXVsk9oB0u9aAhHQKQdmCQtBfN1fZQoaAZHQHGa0HMUypJoB00hAWgIR0CkHluwosqbdX2UKGgGR0BwwQhje9BbaAdL42gIR0CkHvr+o99udX2UKGgGR0BzF9nzxwyZaAdNDQFoCEdApCBGXkYGdXV9lChoBkdAcQDYOlO45WgHTSEBaAhHQKQhEMF2V3V1fZQoaAZHQHGdmz0HyEtoB0v7aAhHQKQhujD8+A51fZQoaAZHQHBY4iLVFx5oB00CAWgIR0CkImm5UcXFdX2UKGgGR0Bvwa3PRiPRaAdL/GgIR0CkI661b7j1dX2UKGgGR0BL3Z0KZ2IPaAdLumgIR0CkJClDfFaTdX2UKGgGR0A+3P5YYBNmaAdLsGgIR0CkJKH2RJVbdX2UKGgGR0BmMEO3DvVmaAdN6ANoCEdApCfuMuOCG3V9lChoBkdAcKHKNQ0oB2gHS99oCEdApCiGMn7YTXV9lChoBkdAcmNWmgrYoWgHTQcBaAhHQKQpVA6+36R1fZQoaAZHQHBDcWXTmXBoB0vpaAhHQKQq3EiMYMx1fZQoaAZHQHNdxgy/KyRoB0vYaAhHQKQrofK6nR91fZQoaAZHQHHKG0iQkopoB0v9aAhHQKQsmahpQDV1fZQoaAZHQEuN5fMOf/ZoB0vCaAhHQKQtT2saKk51fZQoaAZHQG/WDKxLTQVoB0vxaAhHQKQu8uwosqd1fZQoaAZHQHHeJRCQcPxoB0v/aAhHQKQvpWuoxYd1fZQoaAZHQG9cFjVhCt1oB0vpaAhHQKQwQgyuZCx1fZQoaAZHQHAQt03fhuRoB0v8aAhHQKQw8QyRB/t1fZQoaAZHQHGG0XHim2toB0vzaAhHQKQyKZG8VYZ1fZQoaAZHQHDmDEvTPSloB0vZaAhHQKQyvODaoMt1fZQoaAZHQG73+IVM23toB0v2aAhHQKQzX84xUNt1fZQoaAZHQG61GknCwbFoB0v/aAhHQKQ0Dw2ETQF1fZQoaAZHQHKxtUS7GvRoB0v3aAhHQKQ0treIl+p1fZQoaAZHQHJkpg5R0ltoB0viaAhHQKQ156u4gA91fZQoaAZHQHCvlHWjGkxoB0vyaAhHQKQ2jlsguAZ1fZQoaAZHQG+sp0nw5NpoB01eAWgIR0CkN4CobXHzdX2UKGgGR0ByGkTg2qDLaAdL52gIR0CkOLIyTINmdX2UKGgGR0BwV7r4WUKRaAdL9WgIR0CkOVn8KohqdX2UKGgGR0Bydmya/h2oaAdNIgFoCEdApDokHWz4UXV9lChoBkdAciVSjxkNF2gHS9xoCEdApDq64x1xKnV9lChoBkdAcEZ73wkPc2gHS/1oCEdApDv7we/5+HV9lChoBkdAb6feN1hb4mgHS+loCEdApDydgtvn83V9lChoBkdAQPYQ+UyHmGgHS8NoCEdApD0kNc4YJnV9lChoBkdAcVaSq2jO9mgHS9loCEdApD2391loUXV9lChoBkdAb7/CzkZJkGgHS/JoCEdApD5fGn4wiHV9lChoBkdAYweqjJuEVWgHTegDaAhHQKRBpf779AJ1fZQoaAZHQGN3B/7SApdoB03oA2gIR0CkRiJj+aScdX2UKGgGR0BxhO3lS0jUaAdNBwFoCEdApEgqqyWzGHV9lChoBkdAb8F4k/r0KGgHTQABaAhHQKRJQ4CIUJx1fZQoaAZHQG0qq7yxzJZoB0vzaAhHQKRKZNcnmaJ1fZQoaAZHQGEGX4Kx9ohoB03oA2gIR0CkTn7sfJV9dX2UKGgGR0B
yitC8e0XxaAdL/WgIR0CkTzB73PAwdX2UKGgGR0BFbQ6hg3LnaAdLv2gIR0CkT7MPjGT+dX2UKGgGR0By1aZof0VaaAdL7GgIR0CkUOrncL0BdX2UKGgGR0Bw7ABbOeJ6aAdL/mgIR0CkUaBClabGdX2UKGgGR0BvqUefZmI1aAdL9WgIR0CkUkcJlar4dX2UKGgGR0ByEvs1KoQ4aAdL1GgIR0CkUty+xnnMdX2UKGgGR0BxGQ78vVVhaAdL72gIR0CkVBB7VrhzdX2UKGgGR0Bw986JZW7waAdL7GgIR0CkVLHUtqYadX2UKGgGR0Bm2zDn/1g6aAdN6ANoCEdApFf9V3ljmXV9lChoBkdAcnVLHdXT3WgHS/FoCEdApFih0bLlm3V9lChoBkdAb2jI1cdHUmgHS9loCEdApFk3ztkWh3V9lChoBkdAclzLThHby2gHS/hoCEdApFpxIe5nUXV9lChoBkdAcwBqaw2VFGgHS+poCEdApFsV9ORDC3V9lChoBkdAcbzOxSpBHGgHS+xoCEdApFu04rBj4HV9lChoBkdAbGsXE61b7mgHTQ4BaAhHQKRcaURnOB11fZQoaAZHQG9hju8brC5oB00GAWgIR0CkXa1h9b5edX2UKGgGR0By+8uxrzoVaAdL+mgIR0CkXlLjYI0JdX2UKGgGR0Bx7jb5/LDAaAdNBgFoCEdApF8DEYO2A3V9lChoBkdAcWr5IpYs/mgHTQQBaAhHQKRfwA08/2V1ZS4="}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 5860, "observation_space": {":type:": "<class 'gymnasium.spaces.box.Box'>", ":serialized:": "gAWVdgIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoCIwCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoESiWCAAAAAAAAAABAQEBAQEBAZRoFUsIhZRoGXSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBEoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaAtLCIWUaBl0lFKUjARoaWdolGgRKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgLSwiFlGgZdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=", "dtype": "float32", "bounded_below": "[ True True True True True True True True]", "bounded_above": "[ True True True True True True True True]", "_shape": [8], "low": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. ]", "low_repr": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high_repr": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. 
]", "_np_random": null}, "action_space": {":type:": "<class 'gymnasium.spaces.discrete.Discrete'>", ":serialized:": "gAWV/QAAAAAAAACMGWd5bW5hc2l1bS5zcGFjZXMuZGlzY3JldGWUjAhEaXNjcmV0ZZSTlCmBlH2UKIwBbpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCaTiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIBAAAAAAAAACUhpRSlIwFc3RhcnSUaAhoDkMIAAAAAAAAAACUhpRSlIwGX3NoYXBllCmMBWR0eXBllGgLjAJpOJSJiIeUUpQoSwNoD05OTkr/////Sv////9LAHSUYowKX25wX3JhbmRvbZROdWIu", "n": "4", "start": "0", "_shape": [], "dtype": "int64", "_np_random": null}, "n_envs": 1, "n_steps": 1024, "gamma": 0.999, "gae_lambda": 0.98, "ent_coef": 0.01, "vf_coef": 0.5, "max_grad_norm": 0.5, "batch_size": 64, "n_epochs": 4, "clip_range": {":type:": "<class 'function'>", ":serialized:": "gAWVrQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUaACMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFowEZnVuY5SMDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz/JmZmZmZmahZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "clip_range_vf": null, "normalize_advantage": true, "target_kl": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVrQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUaACMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFowEZnVuY5SMDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz8zqSowVTJhhZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "system_info": {"OS": "Linux-6.1.85+-x86_64-with-glibc2.35 # 1 SMP PREEMPT_DYNAMIC Thu Jun 27 21:05:47 UTC 2024", "Python": "3.10.12", "Stable-Baselines3": "2.0.0a5", "PyTorch": "2.5.0+cu121", "GPU Enabled": "True", "Numpy": "1.26.4", "Cloudpickle": "3.1.0", "Gymnasium": "0.28.1", "OpenAI Gym": "0.25.2"}}
ppo-LunarLander-v2.zip
CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:00df2f3011e3d76a748faba84579a6ae11af9aa81d90cf5d255faa84ab6a8205
+size 147155
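ppo-LunarLander-v2.zip is the standard SB3 save archive; the data and policy files diffed below are the same artifacts it bundles. A quick way to inspect it locally (the contents listed in the comment are the usual SB3 layout, not read from this commit):

```python
import zipfile

with zipfile.ZipFile("ppo-LunarLander-v2.zip") as archive:
    print(archive.namelist())
    # Typically: ['data', 'pytorch_variables.pth', 'policy.pth',
    #             'policy.optimizer.pth', '_stable_baselines3_version', 'system_info.txt']
```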
ppo-LunarLander-v2/data
CHANGED
@@ -4,29 +4,29 @@
":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
-"__init__": "<function ActorCriticPolicy.__init__ at
-"_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at
-"reset_noise": "<function ActorCriticPolicy.reset_noise at
-"_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at
-"_build": "<function ActorCriticPolicy._build at
-"forward": "<function ActorCriticPolicy.forward at
-"extract_features": "<function ActorCriticPolicy.extract_features at
-"_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at
-"_predict": "<function ActorCriticPolicy._predict at
-"evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at
-"get_distribution": "<function ActorCriticPolicy.get_distribution at
-"predict_values": "<function ActorCriticPolicy.predict_values at
"__abstractmethods__": "frozenset()",
-"_abc_impl": "<_abc._abc_data object at
},
"verbose": 1,
"policy_kwargs": {},
-"num_timesteps":
-"_total_timesteps":
"_num_timesteps_at_start": 0,
"seed": null,
"action_noise": null,
-"start_time":
"learning_rate": 0.0003,
"tensorboard_log": null,
"_last_obs": null,
@@ -38,17 +38,17 @@
"_episode_num": 0,
"use_sde": false,
"sde_sample_freq": -1,
-"_current_progress_remaining": -0.
"_stats_window_size": 100,
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
-":serialized:": "
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",
":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
},
-"_n_updates":
"observation_space": {
":type:": "<class 'gymnasium.spaces.box.Box'>",
":serialized:": "gAWVdgIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoCIwCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoESiWCAAAAAAAAAABAQEBAQEBAZRoFUsIhZRoGXSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBEoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaAtLCIWUaBl0lFKUjARoaWdolGgRKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgLSwiFlGgZdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=",
":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
+"__init__": "<function ActorCriticPolicy.__init__ at 0x7cf74a827d90>",
+"_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7cf74a827e20>",
+"reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7cf74a827eb0>",
+"_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7cf74a827f40>",
+"_build": "<function ActorCriticPolicy._build at 0x7cf74a834040>",
+"forward": "<function ActorCriticPolicy.forward at 0x7cf74a8340d0>",
+"extract_features": "<function ActorCriticPolicy.extract_features at 0x7cf74a834160>",
+"_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7cf74a8341f0>",
+"_predict": "<function ActorCriticPolicy._predict at 0x7cf74a834280>",
+"evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7cf74a834310>",
+"get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7cf74a8343a0>",
+"predict_values": "<function ActorCriticPolicy.predict_values at 0x7cf74a834430>",
"__abstractmethods__": "frozenset()",
+"_abc_impl": "<_abc._abc_data object at 0x7cf6f492cc00>"
},
"verbose": 1,
"policy_kwargs": {},
+"num_timesteps": 1500160,
+"_total_timesteps": 1500000,
"_num_timesteps_at_start": 0,
"seed": null,
"action_noise": null,
+"start_time": 1730499814071290753,
"learning_rate": 0.0003,
"tensorboard_log": null,
"_last_obs": null,
"_episode_num": 0,
"use_sde": false,
"sde_sample_freq": -1,
+"_current_progress_remaining": -0.00010666666666669933,
"_stats_window_size": 100,
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
+
":serialized:": "gAWVAAwAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQHD2YpQUHpuMAWyUTQ4BjAF0lEdAo/mN6mfoR3V9lChoBkdAb2b7wazeGmgHS/VoCEdAo/p7UTcqOXV9lChoBkdAT2EaGYa5w2gHS7VoCEdAo/sselsP8XV9lChoBkdAShssasIVumgHS7poCEdAo/vsHObAlHV9lChoBkdAcTvdszl90GgHS/xoCEdAo/y036yjYnV9lChoBkdAcSwh9b5dnmgHTQ0BaAhHQKP+Aqaw2VF1fZQoaAZHQG/0Y1YQrc1oB0v5aAhHQKP+rv8ZUDN1fZQoaAZHQHGJpemelKtoB0vyaAhHQKP/UgJTl1d1fZQoaAZHQHF7gXMyJsRoB0voaAhHQKP/8Wu5jH51fZQoaAZHQHGbrtZ3cHpoB0vuaAhHQKQBLbzshPl1fZQoaAZHQHESHvc8DCBoB00kAWgIR0CkAfUSZjQRdX2UKGgGR0BxWrK2a2F4aAdL42gIR0CkApaMBIWhdX2UKGgGR0BwdFvrGBFvaAdL8mgIR0CkAzh2OhkBdX2UKGgGR0BwW5i/fwZwaAdNCgFoCEdApASE/0NBnnV9lChoBkdAcNTSV4X402gHS/doCEdApAUqLsKLKnV9lChoBkdAcgnmYjSofmgHTRsBaAhHQKQF6JY1YQt1fZQoaAZHQG920MG5c1RoB0v3aAhHQKQGlD5TIeZ1fZQoaAZHQHEOx+SbH6xoB00UAWgIR0CkB9s+V1OkdX2UKGgGR0Bx60NjLB9DaAdL/WgIR0CkCIuc2BJ7dX2UKGgGR0Bwd2u6mO2iaAdL7mgIR0CkCSxYq5LAdX2UKGgGR0BywMuOCGvfaAdNEAFoCEdApAni6jFhonV9lChoBkdAb8/Un5SFXmgHS/RoCEdApAsZXEIgNnV9lChoBkdAcBCP8AJb+2gHS/NoCEdApAu704BFNXV9lChoBkdAcRp+6Ae7tmgHTQcBaAhHQKQMcDPGACp1fZQoaAZHQEbfbi6xxDNoB0vEaAhHQKQM9EBKcut1fZQoaAZHQHE0lRLsa89oB0vkaAhHQKQOIErXlKd1fZQoaAZHQHBOO49X9zhoB00BAWgIR0CkDtabF0gbdX2UKGgGR0BvYLm0VrRCaAdL42gIR0CkD29mg8KYdX2UKGgGR0BwTiPtD2J0aAdNEAFoCEdApBAoP3BYWHV9lChoBkdAcl9FfReC1GgHS/9oCEdApBHGhdt2tHV9lChoBkdAcITrqt5lfGgHTQYBaAhHQKQSrPgNwzd1fZQoaAZHQG88suOCGvhoB0vsaAhHQKQTjnmq5sl1fZQoaAZHQGR59G7SRbNoB03oA2gIR0CkF1Gthd+odX2UKGgGR0A43eJ53TuwaAdLu2gIR0CkF9JxvNu+dX2UKGgGR0BwaRiNKh+OaAdL9GgIR0CkGHso2GZedX2UKGgGR0BwwLW07bL2aAdNEgFoCEdApBnL59E1EXV9lChoBkdAYb+163RXwWgHTegDaAhHQKQdF7HAAQx1fZQoaAZHQDp8lOXVsk9oB0u9aAhHQKQdmCQtBfN1fZQoaAZHQHGa0HMUypJoB00hAWgIR0CkHluwosqbdX2UKGgGR0BwwQhje9BbaAdL42gIR0CkHvr+o99udX2UKGgGR0BzF9nzxwyZaAdNDQFoCEdApCBGXkYGdXV9lChoBkdAcQDYOlO45WgHTSEBaAhHQKQhEMF2V3V1fZQoaAZHQHGdmz0HyEtoB0v7aAhHQKQhujD8+A51fZQoaAZHQHBY4iLVFx5oB00CAWgIR0CkImm5UcXFdX2UKGgGR0Bvwa3PRiPRaAdL/GgIR0CkI661b7j1dX2UKGgGR0BL3Z0KZ2IPaAdLumgIR0CkJClDfFaTdX2UKGgGR0A+3P5YYBNmaAdLsGgIR0CkJKH2RJVbdX2UKGgGR0BmMEO3DvVmaAdN6ANoCEdApCfuMuOCG3V9lChoBkdAcKHKNQ0oB2gHS99oCEdApCiGMn7YTXV9lChoBkdAcmNWmgrYoWgHTQcBaAhHQKQpVA6+36R1fZQoaAZHQHBDcWXTmXBoB0vpaAhHQKQq3EiMYMx1fZQoaAZHQHNdxgy/KyRoB0vYaAhHQKQrofK6nR91fZQoaAZHQHHKG0iQkopoB0v9aAhHQKQsmahpQDV1fZQoaAZHQEuN5fMOf/ZoB0vCaAhHQKQtT2saKk51fZQoaAZHQG/WDKxLTQVoB0vxaAhHQKQu8uwosqd1fZQoaAZHQHHeJRCQcPxoB0v/aAhHQKQvpWuoxYd1fZQoaAZHQG9cFjVhCt1oB0vpaAhHQKQwQgyuZCx1fZQoaAZHQHAQt03fhuRoB0v8aAhHQKQw8QyRB/t1fZQoaAZHQHGG0XHim2toB0vzaAhHQKQyKZG8VYZ1fZQoaAZHQHDmDEvTPSloB0vZaAhHQKQyvODaoMt1fZQoaAZHQG73+IVM23toB0v2aAhHQKQzX84xUNt1fZQoaAZHQG61GknCwbFoB0v/aAhHQKQ0Dw2ETQF1fZQoaAZHQHKxtUS7GvRoB0v3aAhHQKQ0treIl+p1fZQoaAZHQHJkpg5R0ltoB0viaAhHQKQ156u4gA91fZQoaAZHQHCvlHWjGkxoB0vyaAhHQKQ2jlsguAZ1fZQoaAZHQG+sp0nw5NpoB01eAWgIR0CkN4CobXHzdX2UKGgGR0ByGkTg2qDLaAdL52gIR0CkOLIyTINmdX2UKGgGR0BwV7r4WUKRaAdL9WgIR0CkOVn8KohqdX2UKGgGR0Bydmya/h2oaAdNIgFoCEdApDokHWz4UXV9lChoBkdAciVSjxkNF2gHS9xoCEdApDq64x1xKnV9lChoBkdAcEZ73wkPc2gHS/1oCEdApDv7we/5+HV9lChoBkdAb6feN1hb4mgHS+loCEdApDydgtvn83V9lChoBkdAQPYQ+UyHmGgHS8NoCEdApD0kNc4YJnV9lChoBkdAcVaSq2jO9mgHS9loCEdApD2391loUXV9lChoBkdAb7/CzkZJkGgHS/JoCEdApD5fGn4wiHV9lChoBkdAYweqjJuEVWgHTegDaAhHQKRBpf779AJ1fZQoaAZHQGN3B/7SApdoB03oA2gIR0CkRiJj+aScdX2UKGgGR0BxhO3lS0jUaAdNBwFoCEdApEgqqyWzGHV9lChoBkdAb8F4k/r0KGgHTQABaAhHQKRJQ4CIUJx1fZQoaAZHQG0qq7yxzJZoB0vzaAhHQKRKZNcnmaJ1fZQoaAZHQGEGX4Kx9ohoB03oA2gIR0CkTn7sfJV9dX2UKGgGR0ByitC8e0XxaAdL/WgIR0CkTzB73PAwdX2UKGgGR0BFbQ6hg3LnaAdLv2gIR0CkT7MPjGT+dX2UKGgGR0By1aZof0VaaAdL7GgIR0CkUOrncL0BdX2UKGgGR0Bw7ABbOeJ6aAdL/mgIR0CkUaBClabGdX2UKGgGR0BvqUefZmI1aAdL9WgIR0CkUkcJla
r4dX2UKGgGR0ByEvs1KoQ4aAdL1GgIR0CkUty+xnnMdX2UKGgGR0BxGQ78vVVhaAdL72gIR0CkVBB7VrhzdX2UKGgGR0Bw986JZW7waAdL7GgIR0CkVLHUtqYadX2UKGgGR0Bm2zDn/1g6aAdN6ANoCEdApFf9V3ljmXV9lChoBkdAcnVLHdXT3WgHS/FoCEdApFih0bLlm3V9lChoBkdAb2jI1cdHUmgHS9loCEdApFk3ztkWh3V9lChoBkdAclzLThHby2gHS/hoCEdApFpxIe5nUXV9lChoBkdAcwBqaw2VFGgHS+poCEdApFsV9ORDC3V9lChoBkdAcbzOxSpBHGgHS+xoCEdApFu04rBj4HV9lChoBkdAbGsXE61b7mgHTQ4BaAhHQKRcaURnOB11fZQoaAZHQG9hju8brC5oB00GAWgIR0CkXa1h9b5edX2UKGgGR0By+8uxrzoVaAdL+mgIR0CkXlLjYI0JdX2UKGgGR0Bx7jb5/LDAaAdNBgFoCEdApF8DEYO2A3V9lChoBkdAcWr5IpYs/mgHTQQBaAhHQKRfwA08/2V1ZS4="
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",
":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
},
+"_n_updates": 5860,
"observation_space": {
":type:": "<class 'gymnasium.spaces.box.Box'>",
":serialized:": "gAWVdgIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoCIwCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoESiWCAAAAAAAAAABAQEBAQEBAZRoFUsIhZRoGXSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBEoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaAtLCIWUaBl0lFKUjARoaWdolGgRKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgLSwiFlGgZdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=",
ppo-LunarLander-v2/policy.optimizer.pth
CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:11ad45f7f8c79e111fb2c21f292758ed6fc50bb2a22631a4deeb7d2bbfb490cf
size 88490
ppo-LunarLander-v2/policy.pth
CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:af2ca40561bcbfdc11ed4c81e876b6072d626d8cbcda624a99ea0bdb035cadd0
size 43762
replay.mp4
CHANGED
Binary files a/replay.mp4 and b/replay.mp4 differ
results.json
CHANGED
@@ -1 +1 @@
-{"mean_reward":
+{"mean_reward": 276.1839692, "std_reward": 24.663124630698924, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2024-11-01T23:07:54.014048"}
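results.json carries exactly the fields an SB3 evaluation pass produces (10 deterministic episodes). A sketch of how such a file can be regenerated from the saved agent, assuming the standard evaluate_policy helper:

```python
import json
from datetime import datetime

import gymnasium as gym
from stable_baselines3 import PPO
from stable_baselines3.common.evaluation import evaluate_policy
from stable_baselines3.common.monitor import Monitor

model = PPO.load("ppo-LunarLander-v2.zip")
eval_env = Monitor(gym.make("LunarLander-v2"))

mean_reward, std_reward = evaluate_policy(
    model, eval_env, n_eval_episodes=10, deterministic=True
)

with open("results.json", "w") as f:
    json.dump(
        {
            "mean_reward": mean_reward,
            "std_reward": std_reward,
            "is_deterministic": True,
            "n_eval_episodes": 10,
            "eval_datetime": datetime.now().isoformat(),
        },
        f,
    )
```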