Upload PPO LunarLander-v2 trained agent
Files changed:
- README.md +16 -40
- config.json +1 -1
- ppo-LunarLander-v2.zip +2 -2
- ppo-LunarLander-v2/data +18 -18
- ppo-LunarLander-v2/policy.optimizer.pth +1 -1
- ppo-LunarLander-v2/policy.pth +1 -1
- ppo-LunarLander-v2/system_info.txt +3 -3
- replay.mp4 +0 -0
- results.json +1 -1
README.md
CHANGED
@@ -1,11 +1,10 @@
 ---
+library_name: stable-baselines3
 tags:
 - LunarLander-v2
-- ppo
 - deep-reinforcement-learning
 - reinforcement-learning
--
-- deep-rl-course
+- stable-baselines3
 model-index:
 - name: PPO
   results:
@@ -17,45 +16,22 @@ model-index:
       type: LunarLander-v2
     metrics:
     - type: mean_reward
-      value: 
+      value: 270.42 +/- 13.26
       name: mean_reward
       verified: false
 ---
 
-
+# **PPO** Agent playing **LunarLander-v2**
+This is a trained model of a **PPO** agent playing **LunarLander-v2**
+using the [stable-baselines3 library](https://github.com/DLR-RM/stable-baselines3).
 
-
-
-
-
-
-
-
-
-
-
-'wandb_entity': None
-'capture_video': False
-'env_id': 'LunarLander-v2'
-'total_timesteps': 50
-'learning_rate': 0.00025
-'num_envs': 4
-'num_steps': 128
-'anneal_lr': True
-'gae': True
-'gamma': 0.99
-'gae_lambda': 0.95
-'num_minibatches': 4
-'update_epochs': 4
-'norm_adv': True
-'clip_coef': 0.2
-'clip_vloss': True
-'ent_coef': 0.01
-'vf_coef': 0.5
-'max_grad_norm': 0.5
-'target_kl': None
-'repo_id': 'matthh/ppo-LunarLander-v2'
-'batch_size': 512
-'minibatch_size': 128}
-```
-
+## Usage (with Stable-baselines3)
+TODO: Add your code
+
+
+```python
+from stable_baselines3 import ...
+from huggingface_sb3 import load_from_hub
+
+...
+```
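The new card's usage section is still a placeholder (`TODO: Add your code` with elided imports). A minimal sketch of what that block could look like, assuming the checkpoint is fetched from this repo (the repo_id `matthh/ppo-LunarLander-v2` appears in the removed hyperparameter dump) and the gym 0.21 API recorded in system_info.txt:

```python
# Hedged sketch of the README's TODO usage block, not the author's final code.
# Assumes the checkpoint is hosted at matthh/ppo-LunarLander-v2 and gym 0.21.
import gym
from huggingface_sb3 import load_from_hub
from stable_baselines3 import PPO

# Download the zipped SB3 model from the Hub; load_from_hub only returns a
# local path, PPO.load does the actual deserialization.
checkpoint = load_from_hub(
    repo_id="matthh/ppo-LunarLander-v2",
    filename="ppo-LunarLander-v2.zip",
)
model = PPO.load(checkpoint)

# Roll out one episode with the trained policy (gym 0.21 step API).
env = gym.make("LunarLander-v2")
obs = env.reset()
done = False
while not done:
    action, _states = model.predict(obs, deterministic=True)
    obs, reward, done, info = env.step(action)
env.close()
```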
config.json
CHANGED
@@ -1 +1 @@
-
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==", "__module__": "stable_baselines3.common.policies", "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function ActorCriticPolicy.__init__ at 0x7fa81c515b80>", "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7fa81c515c10>", "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7fa81c515ca0>", "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7fa81c515d30>", "_build": "<function ActorCriticPolicy._build at 0x7fa81c515dc0>", "forward": "<function ActorCriticPolicy.forward at 0x7fa81c515e50>", "extract_features": "<function ActorCriticPolicy.extract_features at 0x7fa81c515ee0>", "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7fa81c515f70>", "_predict": "<function ActorCriticPolicy._predict at 0x7fa81c517040>", "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7fa81c5170d0>", "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7fa81c517160>", "predict_values": "<function ActorCriticPolicy.predict_values at 0x7fa81c5171f0>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc_data object at 0x7fa81c5942d0>"}, "verbose": 1, "policy_kwargs": {}, "observation_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": 
"gAWVnwEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLCIWUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWIAAAAAAAAAAAAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/5RoCksIhZSMAUOUdJRSlIwEaGlnaJRoEiiWIAAAAAAAAAAAAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAf5RoCksIhZRoFXSUUpSMDWJvdW5kZWRfYmVsb3eUaBIolggAAAAAAAAAAAAAAAAAAACUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLCIWUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYIAAAAAAAAAAAAAAAAAAAAlGghSwiFlGgVdJRSlIwKX25wX3JhbmRvbZROdWIu", "dtype": "float32", "_shape": [8], "low": "[-inf -inf -inf -inf -inf -inf -inf -inf]", "high": "[inf inf inf inf inf inf inf inf]", "bounded_below": "[False False False False False False False False]", "bounded_above": "[False False False False False False False False]", "_np_random": null}, "action_space": {":type:": "<class 'gym.spaces.discrete.Discrete'>", ":serialized:": "gAWVggAAAAAAAACME2d5bS5zcGFjZXMuZGlzY3JldGWUjAhEaXNjcmV0ZZSTlCmBlH2UKIwBbpRLBIwGX3NoYXBllCmMBWR0eXBllIwFbnVtcHmUaAeTlIwCaTiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowKX25wX3JhbmRvbZROdWIu", "n": 4, "_shape": [], "dtype": "int64", "_np_random": null}, "n_envs": 16, "num_timesteps": 1015808, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1675547832272716781, "learning_rate": 0.0003, "tensorboard_log": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOC9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOC9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/M6kqMFUyYYWUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="}, "_last_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYAAgAAAAAAADOtrz0hyww+C39ZPNt5jb4Lq1Q9Bf4oPAAAAAAAAAAAM0WrPHvykrpy+tA3TRLEMtfGw7ia6fG2AACAPwAAgD8guQU+5/FOPzUI7LwKRuC+7eUAPq61FToAAAAAAAAAANhZiL74u5E/KnAHv8YqHr98CIS+9WBpvQAAAAAAAAAA0CSMPi2aXb1Tf4s7y3Y1uslyv74FmQG7AACAPwAAgD+AfGM9e5yXuvg/ozXtgpgw4gcDO4qdu7QAAIA/AACAPxP1H74QQJk/RGQpvyV5Lb8KCfm967ZcvgAAAAAAAAAAmgZKPtZtrz+CyCQ/8FvfvnUsVD7mNoU+AAAAAAAAAAAzzCI9V451PNawwL20kii+p+4GvahqzTwAAAAAAAAAAM1YgLvt7RU+kIlhvUmMfL5BoTQ7hpG+vAAAAAAAAAAATZ9OPda9sT4/uxQ9csCmvo9/TTsKX8u8AAAAAAAAAABAjic+qbKzP3Za2D7RK/O+L6QbPt4sRz4AAAAAAAAAAAM1jb6TW4k+a9bUPnPopb4WpM+9pr+sPQAAAAAAAAAAmkH1u5QJ3zuJHbA92RqAvq7VTb2Imma9AAAAAAAAAADAEaE9KUh3ur/zKblNobOzPac6uz46RDgAAAAAAAAAAJqmnb3NnYo/roCfvswaCb+l4fC9fe79vQAAAAAAAAAAlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksQSwiGlIwBQ5R0lFKULg=="}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": 
"gAWVgwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSxCFlIwBQ5R0lFKULg=="}, "_last_original_obs": null, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": -0.015808000000000044, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVRRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMINIP4wE6NckCUhpRSlIwBbJRL7owBdJRHQJL68XuVopR1fZQoaAZoCWgPQwjEr1jDxXlzQJSGlFKUaBVL+WgWR0CS+vhTwUg0dX2UKGgGaAloD0MIrkoi+6DPcUCUhpRSlGgVS+1oFkdAkvwQyM1jzHV9lChoBmgJaA9DCOD0Lt4PnnFAlIaUUpRoFU0wAWgWR0CS/E48lolEdX2UKGgGaAloD0MIZ3+g3PaWcECUhpRSlGgVTQMBaBZHQJL8wNgBtDV1fZQoaAZoCWgPQwj9hLNby0JyQJSGlFKUaBVL3WgWR0CS/W8GLUCrdX2UKGgGaAloD0MIuRrZlRaIcUCUhpRSlGgVTQMBaBZHQJL9bZsbedl1fZQoaAZoCWgPQwhdF35wvpluQJSGlFKUaBVNGQFoFkdAkv2S/KyOaXV9lChoBmgJaA9DCMcuUb21HW1AlIaUUpRoFUv6aBZHQJL+C7btZ3d1fZQoaAZoCWgPQwj8NsR4jaBwQJSGlFKUaBVL22gWR0CS/19roGILdX2UKGgGaAloD0MIie5Z12gBcUCUhpRSlGgVS/RoFkdAkv/i8jAzpHV9lChoBmgJaA9DCC46WWq9KG5AlIaUUpRoFUvsaBZHQJL/6/j81oB1fZQoaAZoCWgPQwgsvMtFPDtxQJSGlFKUaBVL5GgWR0CTAAemelKsdX2UKGgGaAloD0MIsyRATS09c0CUhpRSlGgVS/5oFkdAkwCj5j6N2nV9lChoBmgJaA9DCDgteNEX6XJAlIaUUpRoFU1CAWgWR0CTAKgx8D0UdX2UKGgGaAloD0MIDp4JTRIUb0CUhpRSlGgVS+VoFkdAkwGDwhGH6HV9lChoBmgJaA9DCGkdVU3QF3FAlIaUUpRoFUvkaBZHQJMCcGHHmzV1fZQoaAZoCWgPQwg/jubIiltyQJSGlFKUaBVL52gWR0CTA0AnlXA/dX2UKGgGaAloD0MIoUrNHijXckCUhpRSlGgVS9JoFkdAkwP79MsYmHV9lChoBmgJaA9DCEMAcOyZl3FAlIaUUpRoFU1CAWgWR0CTBEY/FBIGdX2UKGgGaAloD0MIm3YxzbRZcUCUhpRSlGgVS/xoFkdAkwScTrVvuXV9lChoBmgJaA9DCE3aVN1jwnNAlIaUUpRoFU0jAWgWR0CTBKlA/s3RdX2UKGgGaAloD0MIYabtX1mxbUCUhpRSlGgVS/poFkdAkwSyn1nM+3V9lChoBmgJaA9DCHMrhNXYXHBAlIaUUpRoFU0XAWgWR0CTBVA/9pAVdX2UKGgGaAloD0MIHAjJAibocUCUhpRSlGgVS9RoFkdAkwYGkSElFHV9lChoBmgJaA9DCAcKvJPPMnJAlIaUUpRoFU0HAWgWR0CTBtv0yxiYdX2UKGgGaAloD0MIEXFzKpk3cECUhpRSlGgVS+VoFkdAkwcyVjZtenV9lChoBmgJaA9DCF6iemtgx3BAlIaUUpRoFU0HAWgWR0CTB16XBxgidX2UKGgGaAloD0MIFlETfb7bckCUhpRSlGgVTSoBaBZHQJMIWozeoDR1fZQoaAZoCWgPQwgdWI6QAZpxQJSGlFKUaBVL+WgWR0CTCLTfBN21dX2UKGgGaAloD0MIW5iFdo47cECUhpRSlGgVS+toFkdAkwlNSl3yJHV9lChoBmgJaA9DCLsO1ZTkBG9AlIaUUpRoFUvZaBZHQJMJkc5sCT51fZQoaAZoCWgPQwi+UMB2MKhHQJSGlFKUaBVN6ANoFkdAkwoDjBEa2nV9lChoBmgJaA9DCGGKcmm8jHJAlIaUUpRoFU1XAWgWR0CTCpbiZOSGdX2UKGgGaAloD0MIL/fJUYBwS0CUhpRSlGgVS9NoFkdAkwq3nhbW3HV9lChoBmgJaA9DCAzJycStN25AlIaUUpRoFUvyaBZHQJMLNxEORT11fZQoaAZoCWgPQwgOL4hIjYRyQJSGlFKUaBVL22gWR0CTC5HgxagVdX2UKGgGaAloD0MItW6D2u/OcECUhpRSlGgVS91oFkdAkwxZQLux8nV9lChoBmgJaA9DCCE/G7luV3BAlIaUUpRoFU0cAWgWR0CTDJ7K7qY7dX2UKGgGaAloD0MI547+lystcECUhpRSlGgVS+toFkdAkw2M67ulXXV9lChoBmgJaA9DCF3Aywxb0XNAlIaUUpRoFU1MAWgWR0CTIUrYoRZmdX2UKGgGaAloD0MITfbP04DKcUCUhpRSlGgVTREBaBZHQJMi0JBw++x1fZQoaAZoCWgPQwgLRbqfkw5xQJSGlFKUaBVL7GgWR0CTIvT2FnIydX2UKGgGaAloD0MIA7LXuz/PcECUhpRSlGgVS9poFkdAkySNZJTVD3V9lChoBmgJaA9DCPqXpDLFx25AlIaUUpRoFUv2aBZHQJMk2/h2nsN1fZQoaAZoCWgPQwhzZOWXAdhyQJSGlFKUaBVNOAFoFkdAkyTq55JK8XV9lChoBmgJaA9DCNvdA3QfTXFAlIaUUpRoFUvwaBZHQJMk9kkKNQ11fZQoaAZoCWgPQwgVVb/SecRwQJSGlFKUaBVL52gWR0CTJikhA4XGdX2UKGgGaAloD0MI1zBD40nnc0CUhpRSlGgVTS4BaBZHQJMmXqHGjsV1fZQoaAZoCWgPQwjABdmyfM1xQJSGlFKUaBVL3GgWR0CTJwNBF/hEdX2UKGgGaAloD0MIYkok0YtccECUhpRSlGgVTQ0BaBZHQJMnbhrFfiR1fZQoaAZoCWgPQwiQ3Jp0mz9xQJSGlFKUaBVL/2gWR0CTJ8sMy8BddX2UKGgGaAloD0MIfa62Yj/tcECUhpRSlGgVS/ZoFkdAkykPWDpTuXV9lChoBmgJaA9DCDNPrimQKW1AlIaUUpRoFU0EAWgWR0CTKg3/giu/dX2UKGgGaAloD0MIogxVMZXIb0CUhpRSlGgVS+ZoFkdAkyo44Qz1snV
9lChoBmgJaA9DCOSghJk27HFAlIaUUpRoFUvSaBZHQJMrllyzXz11fZQoaAZoCWgPQwh2Gf7TjflzQJSGlFKUaBVNGwFoFkdAky1SLqD9O3V9lChoBmgJaA9DCCMShZY1UHBAlIaUUpRoFUvfaBZHQJMuMQTVUdd1fZQoaAZoCWgPQwhVTRB1n4NxQJSGlFKUaBVL6GgWR0CTLmg/keZHdX2UKGgGaAloD0MIeJeL+E5OcECUhpRSlGgVTRkBaBZHQJMu5nzxwyZ1fZQoaAZoCWgPQwi31awzvkFMQJSGlFKUaBVLxGgWR0CTMAlIVdondX2UKGgGaAloD0MIj2yummeZckCUhpRSlGgVS/NoFkdAkzC04BFNL3V9lChoBmgJaA9DCPq0iv7Q0HFAlIaUUpRoFU0kAWgWR0CTMaEuQIUrdX2UKGgGaAloD0MIJ/kRv2K2b0CUhpRSlGgVTRwBaBZHQJMy/4Irvst1fZQoaAZoCWgPQwjLSpNSkD9wQJSGlFKUaBVNDwFoFkdAkzYwSOBDonV9lChoBmgJaA9DCAsm/ihqpHJAlIaUUpRoFUv7aBZHQJM2o6vJRwZ1fZQoaAZoCWgPQwhO8iN+xYFvQJSGlFKUaBVNDQFoFkdAkzddi+cpb3V9lChoBmgJaA9DCL74oj1edm5AlIaUUpRoFUvwaBZHQJM3o8kleGB1fZQoaAZoCWgPQwhTy9b6ohxzQJSGlFKUaBVNdQFoFkdAkzhKJyhi9nV9lChoBmgJaA9DCOVFJuBXqHBAlIaUUpRoFU1hAWgWR0CTOFVYp2ECdX2UKGgGaAloD0MIAoBjz14mcUCUhpRSlGgVS81oFkdAkzh2f9P1tnV9lChoBmgJaA9DCMIyNnQz8XFAlIaUUpRoFUvkaBZHQJM4iAuqWC51fZQoaAZoCWgPQwjLg/QUeahwQJSGlFKUaBVL6WgWR0CTOTdiDujRdX2UKGgGaAloD0MIUTHO34RDbkCUhpRSlGgVTQsBaBZHQJM62OGTLW91fZQoaAZoCWgPQwjJWdjTTuRwQJSGlFKUaBVL0mgWR0CTO89mYjSodX2UKGgGaAloD0MIWg2Je2wccUCUhpRSlGgVTQoBaBZHQJM8LtZ3cHp1fZQoaAZoCWgPQwiPHVTies9wQJSGlFKUaBVL/GgWR0CTPFiRW912dX2UKGgGaAloD0MIeLRxxFo2TECUhpRSlGgVTegDaBZHQJM9HjPv8ZV1fZQoaAZoCWgPQwg+Qs2QqjVxQJSGlFKUaBVNOwFoFkdAkz1PEKmbb3V9lChoBmgJaA9DCK9bBMa64XJAlIaUUpRoFUv3aBZHQJM+4690zTF1fZQoaAZoCWgPQwimJyzxAGpuQJSGlFKUaBVL5mgWR0CTP/tJWeYldX2UKGgGaAloD0MIKy/5n/zdcECUhpRSlGgVTQcBaBZHQJNAL1yvLYB1fZQoaAZoCWgPQwiSzOod7rJxQJSGlFKUaBVNGwFoFkdAk0BiFsYVI3V9lChoBmgJaA9DCOeO/pcrFHJAlIaUUpRoFUvhaBZHQJNAvOVxCIF1fZQoaAZoCWgPQwicpzrk5klxQJSGlFKUaBVL+2gWR0CTQN5MURFrdX2UKGgGaAloD0MImwEuyNZfcECUhpRSlGgVS/1oFkdAk0DdQTEiuHV9lChoBmgJaA9DCPJ9camKkHNAlIaUUpRoFU0EAWgWR0CTQOu+yquKdX2UKGgGaAloD0MIGJP+Xop7bECUhpRSlGgVTSYBaBZHQJNBUpnYg7p1fZQoaAZoCWgPQwi6gm3Ek05vQJSGlFKUaBVL9WgWR0CTQ1PJJXhgdX2UKGgGaAloD0MIgPRNmoZYcECUhpRSlGgVTREBaBZHQJNDXSJCSid1fZQoaAZoCWgPQwhE3QcgtRFxQJSGlFKUaBVLz2gWR0CTQ4qwQlKLdX2UKGgGaAloD0MIrKjBNEw2cECUhpRSlGgVS/RoFkdAk0O4dZJTVHV9lChoBmgJaA9DCNyEe2VeqXBAlIaUUpRoFU0LAWgWR0CTRC5Lh73PdX2UKGgGaAloD0MI95MxPkxecUCUhpRSlGgVTQIBaBZHQJNEtcRlHz91fZQoaAZoCWgPQwieXb71IXJwQJSGlFKUaBVL1mgWR0CTRjgmZ3LWdX2UKGgGaAloD0MI8bp+wS77cUCUhpRSlGgVS/RoFkdAk0cHnhbW3HV9lChoBmgJaA9DCCttcY1P629AlIaUUpRoFUvmaBZHQJNHVUNrj5t1fZQoaAZoCWgPQwhky/J1GXBwQJSGlFKUaBVNJAFoFkdAk0eRi5NGmXV9lChoBmgJaA9DCA+5GW7ADXJAlIaUUpRoFUvuaBZHQJNHt3/xUed1fZQoaAZoCWgPQwgG2h1SjERwQJSGlFKUaBVL8GgWR0CTR8ncL0BfdWUu"}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 248, "n_steps": 1024, "gamma": 0.999, "gae_lambda": 0.98, "ent_coef": 0.01, "vf_coef": 0.5, "max_grad_norm": 0.5, "batch_size": 64, "n_epochs": 4, "clip_range": {":type:": "<class 'function'>", ":serialized:": 
"gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOC9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOC9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/yZmZmZmZmoWUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="}, "clip_range_vf": null, "normalize_advantage": true, "target_kl": null, "system_info": {"OS": "Linux-5.10.147+-x86_64-with-glibc2.29 # 1 SMP Sat Dec 10 16:00:40 UTC 2022", "Python": "3.8.10", "Stable-Baselines3": "1.7.0", "PyTorch": "1.13.1+cu116", "GPU Enabled": "True", "Numpy": "1.21.6", "Gym": "0.21.0"}}
+
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==", "__module__": "stable_baselines3.common.policies", "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function ActorCriticPolicy.__init__ at 0x7f2fab7cd820>", "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7f2fab7cd8b0>", "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7f2fab7cd940>", "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7f2fab7cd9d0>", "_build": "<function ActorCriticPolicy._build at 0x7f2fab7cda60>", "forward": "<function ActorCriticPolicy.forward at 0x7f2fab7cdaf0>", "extract_features": "<function ActorCriticPolicy.extract_features at 0x7f2fab7cdb80>", "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7f2fab7cdc10>", "_predict": "<function ActorCriticPolicy._predict at 0x7f2fab7cdca0>", "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7f2fab7cdd30>", "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7f2fab7cddc0>", "predict_values": "<function ActorCriticPolicy.predict_values at 0x7f2fab7cde50>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7f2fab7d1c00>"}, "verbose": 1, "policy_kwargs": {}, "observation_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": 
"gAWVnwEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLCIWUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWIAAAAAAAAAAAAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/5RoCksIhZSMAUOUdJRSlIwEaGlnaJRoEiiWIAAAAAAAAAAAAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAf5RoCksIhZRoFXSUUpSMDWJvdW5kZWRfYmVsb3eUaBIolggAAAAAAAAAAAAAAAAAAACUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLCIWUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYIAAAAAAAAAAAAAAAAAAAAlGghSwiFlGgVdJRSlIwKX25wX3JhbmRvbZROdWIu", "dtype": "float32", "_shape": [8], "low": "[-inf -inf -inf -inf -inf -inf -inf -inf]", "high": "[inf inf inf inf inf inf inf inf]", "bounded_below": "[False False False False False False False False]", "bounded_above": "[False False False False False False False False]", "_np_random": null}, "action_space": {":type:": "<class 'gym.spaces.discrete.Discrete'>", ":serialized:": "gAWVggAAAAAAAACME2d5bS5zcGFjZXMuZGlzY3JldGWUjAhEaXNjcmV0ZZSTlCmBlH2UKIwBbpRLBIwGX3NoYXBllCmMBWR0eXBllIwFbnVtcHmUaAeTlIwCaTiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowKX25wX3JhbmRvbZROdWIu", "n": 4, "_shape": [], "dtype": "int64", "_np_random": null}, "n_envs": 16, "num_timesteps": 1015808, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1678915464755904948, "learning_rate": 0.0003, "tensorboard_log": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/M6kqMFUyYYWUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="}, "_last_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYAAgAAAAAAADNpAr4Kg0i7NnxuuJvSqLW37nM89leSNwAAgD8AAIA/zZoTvT0qXbk+yYI53k8lNJojmrvdoZ64AACAPwAAgD9mOzM95N2IPjp6x72k+6S+KlwNvY6AWb0AAAAAAAAAADPVBr1vNGM+ne/vPQH+lr4COoA9VPutvAAAAAAAAAAAM8Y2vYjHgz8zpNe7xZa4vvqUL72OcOY8AAAAAAAAAAAzXYY9w2U2unxFEjiRImyyN7F7u2XjKbcAAIA/AACAP42qvD1UArI+bXPnvSwbh75jLry8SCnqOwAAAAAAAAAAjZq5vXvcgbr+R7s4wke7MxpcIzvog9q3AACAPwAAgD8zHRe8UgXbu7KSiDw0zYw8a51PPYaubL0AAIA/AACAP3pWib4T+E0/Xq7lPZc5ur6yssa9OWoVPQAAAAAAAAAA5uIzvYWDo7lpYRK6SWkZtYXvrjvTGDE5AACAPwAAgD/Ar7K9k+N1P/At3b0ZVa6+qA6Wvc5ukTwAAAAAAAAAAM2ibbzw0RE/qJ4hvRH/lr41jcG8honzPAAAAAAAAAAAQN/QvTdiST4YH0Q9Y1CNvvMJTL1KZCe9AAAAAAAAAAAzo8q8XfsXP4KxTzx0pJC+bndQPCmspr0AAAAAAAAAAEBqgT2PAlI5LomIPa63Jr4ShUA9Dvg1vgAAAAAAAIA/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksQSwiGlIwBQ5R0lFKULg=="}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": 
"gAWVgwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSxCFlIwBQ5R0lFKULg=="}, "_last_original_obs": null, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": -0.015808000000000044, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVehAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMILQlQU8tgckCUhpRSlIwBbJRNeAGMAXSUR0CTkkff4yoGdX2UKGgGaAloD0MIIlSp2QMTZkCUhpRSlGgVTegDaBZHQJOUh51Ng0F1fZQoaAZoCWgPQwgyrU1j+6pxQJSGlFKUaBVN1wFoFkdAk5SVSKm8/XV9lChoBmgJaA9DCIEhq1s9MmVAlIaUUpRoFU3oA2gWR0CTlJ+aScLCdX2UKGgGaAloD0MITpmbbwS4cUCUhpRSlGgVTU4BaBZHQJOU+o73fyh1fZQoaAZoCWgPQwhlNsgkow1vQJSGlFKUaBVNNwJoFkdAk5V0THsC1nV9lChoBmgJaA9DCNbh6CodAnBAlIaUUpRoFU1LAWgWR0CTla/jsD4hdX2UKGgGaAloD0MIiA6BIwGVbkCUhpRSlGgVTf8BaBZHQJOa8H0K7Zp1fZQoaAZoCWgPQwh9XYb/dBxyQJSGlFKUaBVNPAFoFkdAk50KzzErG3V9lChoBmgJaA9DCEXwv5WsVXJAlIaUUpRoFU3tAWgWR0CTnWt5UtI1dX2UKGgGaAloD0MIGR77WWwWckCUhpRSlGgVTT0BaBZHQJOdx+nZTQ51fZQoaAZoCWgPQwhw6ZjzTGJwQJSGlFKUaBVNjwFoFkdAk587ULDyfHV9lChoBmgJaA9DCNVZLbDHDW5AlIaUUpRoFU1TAWgWR0CTorPxhDw6dX2UKGgGaAloD0MIvD/eq1bwbUCUhpRSlGgVTaoBaBZHQJOj6CuloDh1fZQoaAZoCWgPQwiJt86/3aJuQJSGlFKUaBVNiwFoFkdAk6a6a5PM0XV9lChoBmgJaA9DCI4FhUEZy3JAlIaUUpRoFU2xAWgWR0CTpvy/9Hc2dX2UKGgGaAloD0MIcyuE1VjPcUCUhpRSlGgVTSUDaBZHQJOnv9FWn0l1fZQoaAZoCWgPQwh2i8BYX69vQJSGlFKUaBVNXAJoFkdAk7zKUiY9gXV9lChoBmgJaA9DCF+3CIz1ZHJAlIaUUpRoFU3iAWgWR0CTvT65Gz8hdX2UKGgGaAloD0MIMXpuoaspbkCUhpRSlGgVTSQBaBZHQJPAxMnJDE51fZQoaAZoCWgPQwgk0GBTZ4JuQJSGlFKUaBVNJwNoFkdAk8IgT238XXV9lChoBmgJaA9DCPYjRWRYpW9AlIaUUpRoFU3zAmgWR0CTwi8V58jSdX2UKGgGaAloD0MIDw72JsZXckCUhpRSlGgVTYUBaBZHQJPD5YISlFd1fZQoaAZoCWgPQwjO+/84Yd1wQJSGlFKUaBVNkAFoFkdAk8QM0UGmk3V9lChoBmgJaA9DCA5LAz8qPm9AlIaUUpRoFU3KAWgWR0CTxKQQL/jsdX2UKGgGaAloD0MIKAzKNBp0bECUhpRSlGgVTccBaBZHQJPHyQHRkVh1fZQoaAZoCWgPQwjV6xaBMYhwQJSGlFKUaBVNagFoFkdAk8ijhky1u3V9lChoBmgJaA9DCIMUPIVcVGxAlIaUUpRoFU33AmgWR0CTy9OoYNy6dX2UKGgGaAloD0MI7C+7Jw/7ckCUhpRSlGgVTSEBaBZHQJPL3i1iONp1fZQoaAZoCWgPQwi0yHa+3+ZyQJSGlFKUaBVN7gJoFkdAk8wLv9cbBHV9lChoBmgJaA9DCO4HPDAAiW9AlIaUUpRoFU0bAWgWR0CTzlRoRIz4dX2UKGgGaAloD0MIcEG2LJ+FcUCUhpRSlGgVTXIBaBZHQJPOmnbZezF1fZQoaAZoCWgPQwhrC89LxWdxQJSGlFKUaBVNrgFoFkdAk89sM7U5MnV9lChoBmgJaA9DCGZs6GZ/aW1AlIaUUpRoFU0oAWgWR0CTz8gjhUBGdX2UKGgGaAloD0MIT8x6MRQVcECUhpRSlGgVTQwBaBZHQJPQC4Cp3ot1fZQoaAZoCWgPQwjuJ2N82FFwQJSGlFKUaBVNzQFoFkdAk9BPGMn7YXV9lChoBmgJaA9DCLBZLhsdPnJAlIaUUpRoFU1VAWgWR0CT0tf4AS39dX2UKGgGaAloD0MImQ8IdKYnbUCUhpRSlGgVTV0CaBZHQJPS+2+fywx1fZQoaAZoCWgPQwjc9dIUAf43QJSGlFKUaBVL4mgWR0CT0973fyf+dX2UKGgGaAloD0MIMX2vIfjdcECUhpRSlGgVTS0CaBZHQJPUUx7AtWd1fZQoaAZoCWgPQwj4b16cOHFzQJSGlFKUaBVNWwFoFkdAk9WcRxtHhHV9lChoBmgJaA9DCAMF3smnxUtAlIaUUpRoFUvwaBZHQJPW7D+BH091fZQoaAZoCWgPQwhdxHdiFgdyQJSGlFKUaBVNSgFoFkdAk9pZqZc9n3V9lChoBmgJaA9DCNhhTPq7pHFAlIaUUpRoFU0yAWgWR0CT20LA57w8dX2UKGgGaAloD0MIxLDDmPQBc0CUhpRSlGgVTZ8BaBZHQJPbbaYeDFt1fZQoaAZoCWgPQwgyIeaSqnJuQJSGlFKUaBVNAQJoFkdAk9uubd8ArHV9lChoBmgJaA9DCAPOUrKcwENAlIaUUpRoFUvyaBZHQJPcFO6/Zdx1fZQoaAZoCWgPQwg/WMaGbkJwQJSGlFKUaBVNlgJoFkdAk91T9fkWAXV9lChoBmgJaA9DCOYIGcizKHJAlIaUUpRoFU2fAWgWR0CT3yMfA9FGdX2UKGgGaAloD0MIotPzbiwnb0CUhpRSlGgVTVQCaBZHQJPiabkOqed1fZQoaAZoCWgPQwhe29stCTRyQJSGlFKUaBVNkAFoFkdAk+KI7q6e5HV9lChoBmgJaA9DCA+22O1zPXFAlIaUUpRoFU3lAWgWR0CT4rLIgeRxdX2UKGgGaAloD0MIEheARukkTUCUhpRSlGgVS/loFkdAk+RMFQl8gXV9lChoBmgJaA9DCGWMD7PXH3NAlIaUUpRoFU0qA2gWR0CT5H+JP69CdX2UKGgGaAloD0M
IaW/whUnfbkCUhpRSlGgVTZcBaBZHQJPl1alk6Lh1fZQoaAZoCWgPQwh+U1ipIOtxQJSGlFKUaBVNdQFoFkdAk+XtZJTVD3V9lChoBmgJaA9DCKsF9phI2XBAlIaUUpRoFU3ZAWgWR0CT5mDmbLEDdX2UKGgGaAloD0MIfCjRksctcECUhpRSlGgVTSUBaBZHQJP72OYIBzV1fZQoaAZoCWgPQwh2ptB5TZ1wQJSGlFKUaBVN+QFoFkdAk/1y8BdUsHV9lChoBmgJaA9DCPn02JaBOXBAlIaUUpRoFU1oAWgWR0CT/zZLqUu+dX2UKGgGaAloD0MIn+dPGxV8cECUhpRSlGgVTTkBaBZHQJQBzGbTc7B1fZQoaAZoCWgPQwgabyu99lpxQJSGlFKUaBVNnQFoFkdAlAI4Jmdy1nV9lChoBmgJaA9DCAKbc/CMhHJAlIaUUpRoFU2LAWgWR0CUA4RkmQbNdX2UKGgGaAloD0MIAyZw6+6icUCUhpRSlGgVTTUBaBZHQJQFjaZhKDl1fZQoaAZoCWgPQwjJchJK3yZyQJSGlFKUaBVNNgNoFkdAlAWprtVrAXV9lChoBmgJaA9DCNApyM/GBG9AlIaUUpRoFU0+AWgWR0CUBbntv4ucdX2UKGgGaAloD0MI2xZlNsgcTUCUhpRSlGgVS9loFkdAlAYIcR15jnV9lChoBmgJaA9DCMPYQpCDpXFAlIaUUpRoFU3+AWgWR0CUBrfzjFQ3dX2UKGgGaAloD0MIlbcjnJaLb0CUhpRSlGgVTQkBaBZHQJQGwFyJbdJ1fZQoaAZoCWgPQwiW6ZeIt3duQJSGlFKUaBVNMgFoFkdAlAbQ9ic5KnV9lChoBmgJaA9DCHcU56ijRmxAlIaUUpRoFU0aAWgWR0CUB0r5qM3qdX2UKGgGaAloD0MIrHR3nY1BcECUhpRSlGgVTX0BaBZHQJQHjGcWj451fZQoaAZoCWgPQwizQLtDigNSQJSGlFKUaBVL1GgWR0CUB+UoKD02dX2UKGgGaAloD0MIQ8ajVILVcECUhpRSlGgVTUYBaBZHQJQJ91RtP551fZQoaAZoCWgPQwhgrkULEAlyQJSGlFKUaBVNDAFoFkdAlArBSYPXkHV9lChoBmgJaA9DCJ2BkZc1LG1AlIaUUpRoFU3nAWgWR0CUC+/rB0p3dX2UKGgGaAloD0MI58jKL8OPcUCUhpRSlGgVTVcBaBZHQJQNrdoFmnR1fZQoaAZoCWgPQwgqG9ZUFt9xQJSGlFKUaBVNQgFoFkdAlA3edwvQGHV9lChoBmgJaA9DCAkYXd4c4XBAlIaUUpRoFU3qAWgWR0CUDej/uLJkdX2UKGgGaAloD0MIHuBJC5djcUCUhpRSlGgVTQsBaBZHQJQO2T4cm0F1fZQoaAZoCWgPQwhZ+zvbIw5wQJSGlFKUaBVNNQFoFkdAlA87QLNOd3V9lChoBmgJaA9DCN1dZ0P+nnFAlIaUUpRoFU0lAWgWR0CUD8rRjSXudX2UKGgGaAloD0MIx/DYzyJDcUCUhpRSlGgVTUoBaBZHQJQQVnh86WB1fZQoaAZoCWgPQwjt8xjlGQFvQJSGlFKUaBVNbgFoFkdAlBKPllsguHV9lChoBmgJaA9DCPBpTl4kUXBAlIaUUpRoFU1fAWgWR0CUEwJkXk5qdX2UKGgGaAloD0MIBrzMsFEvcECUhpRSlGgVTWcBaBZHQJQT0xesxPB1fZQoaAZoCWgPQwgjTifZ6hdRQJSGlFKUaBVL8GgWR0CUFsWmgrYodX2UKGgGaAloD0MIHjUmxNydckCUhpRSlGgVTXkBaBZHQJQXk6nzg/F1fZQoaAZoCWgPQwih9ls7kfpxQJSGlFKUaBVN/wFoFkdAlBjbB9Cu2nV9lChoBmgJaA9DCNHq5AxFZXJAlIaUUpRoFU0tAWgWR0CUGRrfcer/dX2UKGgGaAloD0MIvMtFfGfCcECUhpRSlGgVTVMCaBZHQJQaKgsbvPV1fZQoaAZoCWgPQwhiLxSwXUJwQJSGlFKUaBVNJwFoFkdAlBsGzF+/g3V9lChoBmgJaA9DCIih1clZ43BAlIaUUpRoFU2DAmgWR0CUG7Tt9hJAdX2UKGgGaAloD0MI7iHhe/+Lb0CUhpRSlGgVTVEBaBZHQJQb6XSjQAx1fZQoaAZoCWgPQwhNofMaO8NsQJSGlFKUaBVNeQFoFkdAlBwDAN5MUXV9lChoBmgJaA9DCEJbzqW4p3NAlIaUUpRoFU39AWgWR0CUHVc9W6sidX2UKGgGaAloD0MI/cHAc6+DcECUhpRSlGgVTegBaBZHQJQd2Vv/BFd1fZQoaAZoCWgPQwi6L2e2qwdyQJSGlFKUaBVNLgFoFkdAlB4JDJEH+3V9lChoBmgJaA9DCJIhx9azl3FAlIaUUpRoFU3SAWgWR0CUH6K77Kq5dX2UKGgGaAloD0MIzjl4JvSzcUCUhpRSlGgVTawBaBZHQJQfxDYywfR1ZS4="}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 248, "n_steps": 1024, "gamma": 0.999, "gae_lambda": 0.98, "ent_coef": 0.01, "vf_coef": 0.5, "max_grad_norm": 0.5, "batch_size": 64, "n_epochs": 4, "clip_range": {":type:": "<class 'function'>", ":serialized:": 
"gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/yZmZmZmZmoWUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="}, "clip_range_vf": null, "normalize_advantage": true, "target_kl": null, "system_info": {"OS": "Linux-5.10.147+-x86_64-with-glibc2.31 # 1 SMP Sat Dec 10 16:00:40 UTC 2022", "Python": "3.9.16", "Stable-Baselines3": "1.7.0", "PyTorch": "1.13.1+cu116", "GPU Enabled": "True", "Numpy": "1.22.4", "Gym": "0.21.0"}}
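Both versions of config.json are single-line serializations of the SB3 model; the diff boils down to re-pickled function addresses, a new `start_time`, refreshed `_last_obs`/`ep_info_buffer` blobs, and a Python 3.9 environment. The training hyperparameters themselves are unchanged (16 envs, n_steps=1024, batch_size=64, n_epochs=4, gamma=0.999, gae_lambda=0.98, ent_coef=0.01, learning_rate=0.0003, ~1e6 timesteps). As a hedged sketch only, not the author's actual script, a run consistent with those values would look roughly like:

```python
# Sketch of a PPO run matching the hyperparameters stored in config.json.
from stable_baselines3 import PPO
from stable_baselines3.common.env_util import make_vec_env

env = make_vec_env("LunarLander-v2", n_envs=16)  # config records n_envs: 16

model = PPO(
    "MlpPolicy",
    env,
    learning_rate=3e-4,   # "learning_rate": 0.0003
    n_steps=1024,
    batch_size=64,
    n_epochs=4,
    gamma=0.999,
    gae_lambda=0.98,
    ent_coef=0.01,
    verbose=1,
)
model.learn(total_timesteps=1_000_000)  # "_total_timesteps": 1000000
model.save("ppo-LunarLander-v2")
```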
ppo-LunarLander-v2.zip
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:516064146a8cda681d5f56efb5fe3b241d635c1fc2d01778d892f0048b80fb3f
+size 147421
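The .zip and .pth entries in this commit are Git LFS pointer files: each stores only an `oid sha256:<digest>` and a `size`, while the binary itself lives in LFS storage. A hedged sketch (the local file name is an assumption) of checking a downloaded artifact against its pointer:

```python
# Sketch: verify a downloaded artifact against its Git LFS pointer (oid + size).
import hashlib
import os

path = "ppo-LunarLander-v2.zip"
expected_oid = "516064146a8cda681d5f56efb5fe3b241d635c1fc2d01778d892f0048b80fb3f"
expected_size = 147421

sha256 = hashlib.sha256()
with open(path, "rb") as f:
    # Hash in 1 MiB chunks so large artifacts do not need to fit in memory.
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha256.update(chunk)

assert sha256.hexdigest() == expected_oid, "sha256 mismatch"
assert os.path.getsize(path) == expected_size, "size mismatch"
print("LFS pointer matches the downloaded file")
```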
ppo-LunarLander-v2/data
CHANGED
@@ -4,20 +4,20 @@
":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
-"__init__": "<function ActorCriticPolicy.__init__ at
-"_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at
-"reset_noise": "<function ActorCriticPolicy.reset_noise at
-"_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at
-"_build": "<function ActorCriticPolicy._build at
-"forward": "<function ActorCriticPolicy.forward at
-"extract_features": "<function ActorCriticPolicy.extract_features at
-"_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at
-"_predict": "<function ActorCriticPolicy._predict at
-"evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at
-"get_distribution": "<function ActorCriticPolicy.get_distribution at
-"predict_values": "<function ActorCriticPolicy.predict_values at
"__abstractmethods__": "frozenset()",
-"_abc_impl": "<_abc_data object at
},
"verbose": 1,
"policy_kwargs": {},
@@ -48,16 +48,16 @@
"_num_timesteps_at_start": 0,
"seed": null,
"action_noise": null,
-"start_time":
"learning_rate": 0.0003,
"tensorboard_log": null,
"lr_schedule": {
":type:": "<class 'function'>",
-
":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+
},
"_last_obs": {
":type:": "<class 'numpy.ndarray'>",
-":serialized:": "
},
"_last_episode_starts": {
":type:": "<class 'numpy.ndarray'>",
@@ -70,7 +70,7 @@
"_current_progress_remaining": -0.015808000000000044,
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
-":serialized:": "
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",
@@ -87,7 +87,7 @@
"n_epochs": 4,
"clip_range": {
":type:": "<class 'function'>",
-
":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+
},
"clip_range_vf": null,
"normalize_advantage": true,
":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
+"__init__": "<function ActorCriticPolicy.__init__ at 0x7f2fab7cd820>",
+"_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7f2fab7cd8b0>",
+"reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7f2fab7cd940>",
+"_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7f2fab7cd9d0>",
+"_build": "<function ActorCriticPolicy._build at 0x7f2fab7cda60>",
+"forward": "<function ActorCriticPolicy.forward at 0x7f2fab7cdaf0>",
+"extract_features": "<function ActorCriticPolicy.extract_features at 0x7f2fab7cdb80>",
+"_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7f2fab7cdc10>",
+"_predict": "<function ActorCriticPolicy._predict at 0x7f2fab7cdca0>",
+"evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7f2fab7cdd30>",
+"get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7f2fab7cddc0>",
+"predict_values": "<function ActorCriticPolicy.predict_values at 0x7f2fab7cde50>",
"__abstractmethods__": "frozenset()",
+"_abc_impl": "<_abc._abc_data object at 0x7f2fab7d1c00>"
},
"verbose": 1,
"policy_kwargs": {},

"_num_timesteps_at_start": 0,
"seed": null,
"action_noise": null,
+"start_time": 1678915464755904948,
"learning_rate": 0.0003,
"tensorboard_log": null,
"lr_schedule": {
":type:": "<class 'function'>",
+
":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/M6kqMFUyYYWUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="
},
"_last_obs": {
":type:": "<class 'numpy.ndarray'>",
+
":serialized:": "gAWVdQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYAAgAAAAAAADNpAr4Kg0i7NnxuuJvSqLW37nM89leSNwAAgD8AAIA/zZoTvT0qXbk+yYI53k8lNJojmrvdoZ64AACAPwAAgD9mOzM95N2IPjp6x72k+6S+KlwNvY6AWb0AAAAAAAAAADPVBr1vNGM+ne/vPQH+lr4COoA9VPutvAAAAAAAAAAAM8Y2vYjHgz8zpNe7xZa4vvqUL72OcOY8AAAAAAAAAAAzXYY9w2U2unxFEjiRImyyN7F7u2XjKbcAAIA/AACAP42qvD1UArI+bXPnvSwbh75jLry8SCnqOwAAAAAAAAAAjZq5vXvcgbr+R7s4wke7MxpcIzvog9q3AACAPwAAgD8zHRe8UgXbu7KSiDw0zYw8a51PPYaubL0AAIA/AACAP3pWib4T+E0/Xq7lPZc5ur6yssa9OWoVPQAAAAAAAAAA5uIzvYWDo7lpYRK6SWkZtYXvrjvTGDE5AACAPwAAgD/Ar7K9k+N1P/At3b0ZVa6+qA6Wvc5ukTwAAAAAAAAAAM2ibbzw0RE/qJ4hvRH/lr41jcG8honzPAAAAAAAAAAAQN/QvTdiST4YH0Q9Y1CNvvMJTL1KZCe9AAAAAAAAAAAzo8q8XfsXP4KxTzx0pJC+bndQPCmspr0AAAAAAAAAAEBqgT2PAlI5LomIPa63Jr4ShUA9Dvg1vgAAAAAAAIA/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksQSwiGlIwBQ5R0lFKULg=="
},
"_last_episode_starts": {
":type:": "<class 'numpy.ndarray'>",

"_current_progress_remaining": -0.015808000000000044,
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
+
":serialized:": "gAWVehAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMILQlQU8tgckCUhpRSlIwBbJRNeAGMAXSUR0CTkkff4yoGdX2UKGgGaAloD0MIIlSp2QMTZkCUhpRSlGgVTegDaBZHQJOUh51Ng0F1fZQoaAZoCWgPQwgyrU1j+6pxQJSGlFKUaBVN1wFoFkdAk5SVSKm8/XV9lChoBmgJaA9DCIEhq1s9MmVAlIaUUpRoFU3oA2gWR0CTlJ+aScLCdX2UKGgGaAloD0MITpmbbwS4cUCUhpRSlGgVTU4BaBZHQJOU+o73fyh1fZQoaAZoCWgPQwhlNsgkow1vQJSGlFKUaBVNNwJoFkdAk5V0THsC1nV9lChoBmgJaA9DCNbh6CodAnBAlIaUUpRoFU1LAWgWR0CTla/jsD4hdX2UKGgGaAloD0MIiA6BIwGVbkCUhpRSlGgVTf8BaBZHQJOa8H0K7Zp1fZQoaAZoCWgPQwh9XYb/dBxyQJSGlFKUaBVNPAFoFkdAk50KzzErG3V9lChoBmgJaA9DCEXwv5WsVXJAlIaUUpRoFU3tAWgWR0CTnWt5UtI1dX2UKGgGaAloD0MIGR77WWwWckCUhpRSlGgVTT0BaBZHQJOdx+nZTQ51fZQoaAZoCWgPQwhw6ZjzTGJwQJSGlFKUaBVNjwFoFkdAk587ULDyfHV9lChoBmgJaA9DCNVZLbDHDW5AlIaUUpRoFU1TAWgWR0CTorPxhDw6dX2UKGgGaAloD0MIvD/eq1bwbUCUhpRSlGgVTaoBaBZHQJOj6CuloDh1fZQoaAZoCWgPQwiJt86/3aJuQJSGlFKUaBVNiwFoFkdAk6a6a5PM0XV9lChoBmgJaA9DCI4FhUEZy3JAlIaUUpRoFU2xAWgWR0CTpvy/9Hc2dX2UKGgGaAloD0MIcyuE1VjPcUCUhpRSlGgVTSUDaBZHQJOnv9FWn0l1fZQoaAZoCWgPQwh2i8BYX69vQJSGlFKUaBVNXAJoFkdAk7zKUiY9gXV9lChoBmgJaA9DCF+3CIz1ZHJAlIaUUpRoFU3iAWgWR0CTvT65Gz8hdX2UKGgGaAloD0MIMXpuoaspbkCUhpRSlGgVTSQBaBZHQJPAxMnJDE51fZQoaAZoCWgPQwgk0GBTZ4JuQJSGlFKUaBVNJwNoFkdAk8IgT238XXV9lChoBmgJaA9DCPYjRWRYpW9AlIaUUpRoFU3zAmgWR0CTwi8V58jSdX2UKGgGaAloD0MIDw72JsZXckCUhpRSlGgVTYUBaBZHQJPD5YISlFd1fZQoaAZoCWgPQwjO+/84Yd1wQJSGlFKUaBVNkAFoFkdAk8QM0UGmk3V9lChoBmgJaA9DCA5LAz8qPm9AlIaUUpRoFU3KAWgWR0CTxKQQL/jsdX2UKGgGaAloD0MIKAzKNBp0bECUhpRSlGgVTccBaBZHQJPHyQHRkVh1fZQoaAZoCWgPQwjV6xaBMYhwQJSGlFKUaBVNagFoFkdAk8ijhky1u3V9lChoBmgJaA9DCIMUPIVcVGxAlIaUUpRoFU33AmgWR0CTy9OoYNy6dX2UKGgGaAloD0MI7C+7Jw/7ckCUhpRSlGgVTSEBaBZHQJPL3i1iONp1fZQoaAZoCWgPQwi0yHa+3+ZyQJSGlFKUaBVN7gJoFkdAk8wLv9cbBHV9lChoBmgJaA9DCO4HPDAAiW9AlIaUUpRoFU0bAWgWR0CTzlRoRIz4dX2UKGgGaAloD0MIcEG2LJ+FcUCUhpRSlGgVTXIBaBZHQJPOmnbZezF1fZQoaAZoCWgPQwhrC89LxWdxQJSGlFKUaBVNrgFoFkdAk89sM7U5MnV9lChoBmgJaA9DCGZs6GZ/aW1AlIaUUpRoFU0oAWgWR0CTz8gjhUBGdX2UKGgGaAloD0MIT8x6MRQVcECUhpRSlGgVTQwBaBZHQJPQC4Cp3ot1fZQoaAZoCWgPQwjuJ2N82FFwQJSGlFKUaBVNzQFoFkdAk9BPGMn7YXV9lChoBmgJaA9DCLBZLhsdPnJAlIaUUpRoFU1VAWgWR0CT0tf4AS39dX2UKGgGaAloD0MImQ8IdKYnbUCUhpRSlGgVTV0CaBZHQJPS+2+fywx1fZQoaAZoCWgPQwjc9dIUAf43QJSGlFKUaBVL4mgWR0CT0973fyf+dX2UKGgGaAloD0MIMX2vIfjdcECUhpRSlGgVTS0CaBZHQJPUUx7AtWd1fZQoaAZoCWgPQwj4b16cOHFzQJSGlFKUaBVNWwFoFkdAk9WcRxtHhHV9lChoBmgJaA9DCAMF3smnxUtAlIaUUpRoFUvwaBZHQJPW7D+BH091fZQoaAZoCWgPQwhdxHdiFgdyQJSGlFKUaBVNSgFoFkdAk9pZqZc9n3V9lChoBmgJaA9DCNhhTPq7pHFAlIaUUpRoFU0yAWgWR0CT20LA57w8dX2UKGgGaAloD0MIxLDDmPQBc0CUhpRSlGgVTZ8BaBZHQJPbbaYeDFt1fZQoaAZoCWgPQwgyIeaSqnJuQJSGlFKUaBVNAQJoFkdAk9uubd8ArHV9lChoBmgJaA9DCAPOUrKcwENAlIaUUpRoFUvyaBZHQJPcFO6/Zdx1fZQoaAZoCWgPQwg/WMaGbkJwQJSGlFKUaBVNlgJoFkdAk91T9fkWAXV9lChoBmgJaA9DCOYIGcizKHJAlIaUUpRoFU2fAWgWR0CT3yMfA9FGdX2UKGgGaAloD0MIotPzbiwnb0CUhpRSlGgVTVQCaBZHQJPiabkOqed1fZQoaAZoCWgPQwhe29stCTRyQJSGlFKUaBVNkAFoFkdAk+KI7q6e5HV9lChoBmgJaA9DCA+22O1zPXFAlIaUUpRoFU3lAWgWR0CT4rLIgeRxdX2UKGgGaAloD0MIEheARukkTUCUhpRSlGgVS/loFkdAk+RMFQl8gXV9lChoBmgJaA9DCGWMD7PXH3NAlIaUUpRoFU0qA2gWR0CT5H+JP69CdX2UKGgGaAloD0MIaW/whUnfbkCUhpRSlGgVTZcBaBZHQJPl1alk6Lh1fZQoaAZoCWgPQwh+U1ipIOtxQJSGlFKUaBVNdQFoFkdAk+XtZJTVD3V9lChoBmgJaA9DCKsF9phI2XBAlIaUUpRoFU3ZAWgWR0CT5mDmbLEDdX2UKGgGaAloD0MIfCjRksctcECUhpRSlGgVTSUBaBZHQJP72OYIBzV1fZQoaAZoCWgPQwh2ptB5TZ1wQJSGlFKUaBVN+QFoFkdAk/1y8BdUsHV9lChoBmgJaA9DCPn02JaBOXBAlIaUUpRoFU1oAWgWR0CT/zZLqUu+dX2UKGgGaAloD0MIn+dPGxV8cECUhpRSlGgVTTkBaBZHQJQBzGbTc7B1fZQoaAZoCWgPQwgabyu99lpxQJSGlF
KUaBVNnQFoFkdAlAI4Jmdy1nV9lChoBmgJaA9DCAKbc/CMhHJAlIaUUpRoFU2LAWgWR0CUA4RkmQbNdX2UKGgGaAloD0MIAyZw6+6icUCUhpRSlGgVTTUBaBZHQJQFjaZhKDl1fZQoaAZoCWgPQwjJchJK3yZyQJSGlFKUaBVNNgNoFkdAlAWprtVrAXV9lChoBmgJaA9DCNApyM/GBG9AlIaUUpRoFU0+AWgWR0CUBbntv4ucdX2UKGgGaAloD0MI2xZlNsgcTUCUhpRSlGgVS9loFkdAlAYIcR15jnV9lChoBmgJaA9DCMPYQpCDpXFAlIaUUpRoFU3+AWgWR0CUBrfzjFQ3dX2UKGgGaAloD0MIlbcjnJaLb0CUhpRSlGgVTQkBaBZHQJQGwFyJbdJ1fZQoaAZoCWgPQwiW6ZeIt3duQJSGlFKUaBVNMgFoFkdAlAbQ9ic5KnV9lChoBmgJaA9DCHcU56ijRmxAlIaUUpRoFU0aAWgWR0CUB0r5qM3qdX2UKGgGaAloD0MIrHR3nY1BcECUhpRSlGgVTX0BaBZHQJQHjGcWj451fZQoaAZoCWgPQwizQLtDigNSQJSGlFKUaBVL1GgWR0CUB+UoKD02dX2UKGgGaAloD0MIQ8ajVILVcECUhpRSlGgVTUYBaBZHQJQJ91RtP551fZQoaAZoCWgPQwhgrkULEAlyQJSGlFKUaBVNDAFoFkdAlArBSYPXkHV9lChoBmgJaA9DCJ2BkZc1LG1AlIaUUpRoFU3nAWgWR0CUC+/rB0p3dX2UKGgGaAloD0MI58jKL8OPcUCUhpRSlGgVTVcBaBZHQJQNrdoFmnR1fZQoaAZoCWgPQwgqG9ZUFt9xQJSGlFKUaBVNQgFoFkdAlA3edwvQGHV9lChoBmgJaA9DCAkYXd4c4XBAlIaUUpRoFU3qAWgWR0CUDej/uLJkdX2UKGgGaAloD0MIHuBJC5djcUCUhpRSlGgVTQsBaBZHQJQO2T4cm0F1fZQoaAZoCWgPQwhZ+zvbIw5wQJSGlFKUaBVNNQFoFkdAlA87QLNOd3V9lChoBmgJaA9DCN1dZ0P+nnFAlIaUUpRoFU0lAWgWR0CUD8rRjSXudX2UKGgGaAloD0MIx/DYzyJDcUCUhpRSlGgVTUoBaBZHQJQQVnh86WB1fZQoaAZoCWgPQwjt8xjlGQFvQJSGlFKUaBVNbgFoFkdAlBKPllsguHV9lChoBmgJaA9DCPBpTl4kUXBAlIaUUpRoFU1fAWgWR0CUEwJkXk5qdX2UKGgGaAloD0MIBrzMsFEvcECUhpRSlGgVTWcBaBZHQJQT0xesxPB1fZQoaAZoCWgPQwgjTifZ6hdRQJSGlFKUaBVL8GgWR0CUFsWmgrYodX2UKGgGaAloD0MIHjUmxNydckCUhpRSlGgVTXkBaBZHQJQXk6nzg/F1fZQoaAZoCWgPQwih9ls7kfpxQJSGlFKUaBVN/wFoFkdAlBjbB9Cu2nV9lChoBmgJaA9DCNHq5AxFZXJAlIaUUpRoFU0tAWgWR0CUGRrfcer/dX2UKGgGaAloD0MIvMtFfGfCcECUhpRSlGgVTVMCaBZHQJQaKgsbvPV1fZQoaAZoCWgPQwhiLxSwXUJwQJSGlFKUaBVNJwFoFkdAlBsGzF+/g3V9lChoBmgJaA9DCIih1clZ43BAlIaUUpRoFU2DAmgWR0CUG7Tt9hJAdX2UKGgGaAloD0MI7iHhe/+Lb0CUhpRSlGgVTVEBaBZHQJQb6XSjQAx1fZQoaAZoCWgPQwhNofMaO8NsQJSGlFKUaBVNeQFoFkdAlBwDAN5MUXV9lChoBmgJaA9DCEJbzqW4p3NAlIaUUpRoFU39AWgWR0CUHVc9W6sidX2UKGgGaAloD0MI/cHAc6+DcECUhpRSlGgVTegBaBZHQJQd2Vv/BFd1fZQoaAZoCWgPQwi6L2e2qwdyQJSGlFKUaBVNLgFoFkdAlB4JDJEH+3V9lChoBmgJaA9DCJIhx9azl3FAlIaUUpRoFU3SAWgWR0CUH6K77Kq5dX2UKGgGaAloD0MIzjl4JvSzcUCUhpRSlGgVTawBaBZHQJQfxDYywfR1ZS4="
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",

"n_epochs": 4,
"clip_range": {
":type:": "<class 'function'>",
+
":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/yZmZmZmZmoWUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="
},
"clip_range_vf": null,
"normalize_advantage": true,
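As with config.json, the changes to `ppo-LunarLander-v2/data` are re-serialization noise plus retrained state: new function addresses from the Python 3.9 runtime, a new `start_time`, and refreshed `_last_obs`/`ep_info_buffer` payloads, while the learning hyperparameters are untouched. Rather than decoding the base64 blobs, the same values can be read off the loaded model; a sketch, assuming a local copy of the uploaded zip:

```python
# Sketch: inspect the values that ppo-LunarLander-v2/data serializes,
# via the loaded model instead of the raw base64 blobs.
from stable_baselines3 import PPO

model = PPO.load("ppo-LunarLander-v2.zip")  # assumed local copy of the zip

print(model.observation_space)   # Box with shape (8,) for LunarLander-v2
print(model.action_space)        # Discrete(4)
print(model.n_steps, model.batch_size, model.n_epochs)   # 1024 64 4
print(model.gamma, model.gae_lambda, model.ent_coef)     # 0.999 0.98 0.01
print(model.num_timesteps)       # 1015808 = 16 envs * 1024 steps * 62 updates
print(model.clip_range(1.0))     # constant schedule -> 0.2
```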
ppo-LunarLander-v2/policy.optimizer.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:1b7fc7aee1292770b404f45e19179271da71b9da4dd70159da026e0f6c1dc9f2
 size 87929
ppo-LunarLander-v2/policy.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:14d3fbd7f286853fa51149b52124013e718c7a6d4edcb21b4cbb7806388e9dec
 size 43393
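`policy.pth` and `policy.optimizer.pth` are plain PyTorch state dicts (the policy weights and the Adam optimizer state); only their content hashes change here because the weights were retrained, while the file sizes stay identical. A hedged sketch of peeking inside them, assuming local copies of the files:

```python
# Sketch: the .pth entries are torch state_dicts; listing their keys shows the
# MLP actor-critic layout (feature extractor, policy net, value net, heads).
import torch

policy_state = torch.load("ppo-LunarLander-v2/policy.pth", map_location="cpu")
optimizer_state = torch.load("ppo-LunarLander-v2/policy.optimizer.pth", map_location="cpu")

for name, tensor in policy_state.items():
    print(name, tuple(tensor.shape))

print(optimizer_state.keys())  # typically 'state' and 'param_groups' for Adam
```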
ppo-LunarLander-v2/system_info.txt
CHANGED
@@ -1,7 +1,7 @@
-- OS: Linux-5.10.147+-x86_64-with-glibc2.29 # 1 SMP Sat Dec 10 16:00:40 UTC 2022
-- Python: 3.8.10
+- OS: Linux-5.10.147+-x86_64-with-glibc2.31 # 1 SMP Sat Dec 10 16:00:40 UTC 2022
+- Python: 3.9.16
 - Stable-Baselines3: 1.7.0
 - PyTorch: 1.13.1+cu116
 - GPU Enabled: True
-- Numpy: 1.21.6
+- Numpy: 1.22.4
 - Gym: 0.21.0
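The regenerated system_info.txt reflects a newer build image (glibc 2.31, Python 3.9.16, NumPy 1.22.4) with the same SB3 1.7.0 / PyTorch 1.13.1 / Gym 0.21.0 stack. Assuming this file mirrors the output of SB3's `get_system_info` helper (which is how such listings are typically produced), a sketch for reproducing it locally:

```python
# Sketch: write a system_info.txt-style listing for the local environment.
# Assumes stable_baselines3.common.utils.get_system_info is available (SB3 1.7.0).
from stable_baselines3.common.utils import get_system_info

env_info, env_info_str = get_system_info(print_info=False)
with open("system_info.txt", "w") as f:
    f.write(env_info_str)
```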
replay.mp4
CHANGED
Binary files a/replay.mp4 and b/replay.mp4 differ
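replay.mp4 is re-recorded from the new policy, so only the binary changes. A hedged sketch of producing a comparable clip with SB3's `VecVideoRecorder` (requires ffmpeg; the episode length and output folder are arbitrary choices here):

```python
# Sketch: record a short rollout of the loaded policy to an MP4 file.
import gym
from stable_baselines3 import PPO
from stable_baselines3.common.vec_env import DummyVecEnv, VecVideoRecorder

model = PPO.load("ppo-LunarLander-v2.zip")  # assumed local copy

env = DummyVecEnv([lambda: gym.make("LunarLander-v2")])
env = VecVideoRecorder(
    env,
    video_folder="videos",                     # output lands here, not in replay.mp4
    record_video_trigger=lambda step: step == 0,
    video_length=1000,
    name_prefix="replay",
)

obs = env.reset()
for _ in range(1000):
    action, _ = model.predict(obs, deterministic=True)
    obs, reward, done, info = env.step(action)
env.close()  # finalizes the video file
```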
results.json
CHANGED
@@ -1 +1 @@
-{"
+{"mean_reward": 270.4158864801113, "std_reward": 13.263353279770756, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2023-03-15T21:47:07.010420"}
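results.json replaces the previous record (its old content is elided in the diff) with a 10-episode deterministic evaluation: mean_reward ≈ 270.42, std_reward ≈ 13.26, matching the value now advertised in the model card. A sketch of an evaluation that produces a record of this shape, again assuming a local copy of the model:

```python
# Sketch of an evaluation producing a results.json-shaped record; the exact
# script used for this commit is not part of the diff, so treat as illustrative.
import datetime
import json

import gym
from stable_baselines3 import PPO
from stable_baselines3.common.evaluation import evaluate_policy

model = PPO.load("ppo-LunarLander-v2.zip")  # assumed local copy
eval_env = gym.make("LunarLander-v2")

mean_reward, std_reward = evaluate_policy(
    model, eval_env, n_eval_episodes=10, deterministic=True
)

with open("results.json", "w") as f:
    json.dump(
        {
            "mean_reward": mean_reward,
            "std_reward": std_reward,
            "is_deterministic": True,
            "n_eval_episodes": 10,
            "eval_datetime": datetime.datetime.now().isoformat(),
        },
        f,
    )
```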