Commit 7205f48
Parent(s): 08afe72
Initial commit

Files changed:
- README.md +1 -1
- a2c-PandaReachDense-v2.zip +1 -1
- a2c-PandaReachDense-v2/data +10 -10
- a2c-PandaReachDense-v2/policy.optimizer.pth +1 -1
- a2c-PandaReachDense-v2/policy.pth +1 -1
- config.json +1 -1
- replay.mp4 +0 -0
- results.json +1 -1
- vec_normalize.pkl +1 -1
README.md
CHANGED
@@ -16,7 +16,7 @@ model-index:
       type: PandaReachDense-v2
     metrics:
     - type: mean_reward
-      value: -16
+      value: -1.16 +/- 0.33
       name: mean_reward
       verified: false
 ---
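The only model-card change is the reported metric: mean_reward moves from -16 to -1.16 +/- 0.33, consistent with the updated results.json below (10 deterministic evaluation episodes). A minimal sketch of how such a number is reproduced from the files in this repo, assuming the Stable-Baselines3 1.7.0 / gym 0.21 / panda-gym setup recorded in config.json (the actual evaluation script is not part of this commit):

```python
import gym
import panda_gym  # registers PandaReachDense-v2 (assumed installed)

from stable_baselines3 import A2C
from stable_baselines3.common.evaluation import evaluate_policy
from stable_baselines3.common.vec_env import DummyVecEnv, VecNormalize

# Rebuild the evaluation env and load the saved normalization statistics.
eval_env = DummyVecEnv([lambda: gym.make("PandaReachDense-v2")])
eval_env = VecNormalize.load("vec_normalize.pkl", eval_env)
eval_env.training = False     # freeze the running statistics during evaluation
eval_env.norm_reward = False  # report raw, unnormalized rewards

model = A2C.load("a2c-PandaReachDense-v2.zip", env=eval_env)

# 10 deterministic episodes, matching n_eval_episodes / is_deterministic in results.json.
mean_reward, std_reward = evaluate_policy(
    model, eval_env, n_eval_episodes=10, deterministic=True
)
print(f"mean_reward={mean_reward:.2f} +/- {std_reward:.2f}")
```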
a2c-PandaReachDense-v2.zip
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:862f4463cd2606e69492b6ae4d524f40df0c514bb5729191907cc3561d6e9c61
 size 108011
a2c-PandaReachDense-v2/data
CHANGED
@@ -4,9 +4,9 @@
":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
-
"__init__": "<function MultiInputActorCriticPolicy.__init__ at
"__abstractmethods__": "frozenset()",
-
"_abc_impl": "<_abc_data object at
},
"verbose": 1,
"policy_kwargs": {
@@ -46,7 +46,7 @@
"_num_timesteps_at_start": 0,
"seed": null,
"action_noise": null,
-
"start_time":
"learning_rate": 0.0007,
"tensorboard_log": null,
"lr_schedule": {
@@ -55,10 +55,10 @@
},
"_last_obs": {
":type:": "<class 'collections.OrderedDict'>",
-
":serialized:": "
-
"achieved_goal": "[[0.
-
"desired_goal": "[[
-
"observation": "[[ 0.
},
"_last_episode_starts": {
":type:": "<class 'numpy.ndarray'>",
@@ -66,9 +66,9 @@
},
"_last_original_obs": {
":type:": "<class 'collections.OrderedDict'>",
-
":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////
"achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]",
-
"desired_goal": "[[ 0.
"observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"
},
"_episode_num": 0,
@@ -77,7 +77,7 @@
"_current_progress_remaining": 0.0,
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
-
":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",
":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
+
"__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7fcb9295b3a0>",
"__abstractmethods__": "frozenset()",
+
"_abc_impl": "<_abc_data object at 0x7fcb92952a50>"
},
"verbose": 1,
"policy_kwargs": {
"_num_timesteps_at_start": 0,
"seed": null,
"action_noise": null,
+
"start_time": 1677961605429071276,
"learning_rate": 0.0007,
"tensorboard_log": null,
"lr_schedule": {
},
"_last_obs": {
":type:": "<class 'collections.OrderedDict'>",
+
":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAfPnTPj13ozru4xE/fPnTPj13ozru4xE/fPnTPj13ozru4xE/fPnTPj13ozru4xE/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAv5t+v3N3hbx6/GQ/GmRWPzmuPr/xvF4/CnyuP4DAA788g4U/eEXXPwhj9r4fS4G/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAAB8+dM+PXejOu7jET/mYK08Ef82uwFTiDx8+dM+PXejOu7jET/mYK08Ef82uwFTiDx8+dM+PXejOu7jET/mYK08Ef82uwFTiDx8+dM+PXejOu7jET/mYK08Ef82uwFTiDyUaA5LBEsGhpRoEnSUUpR1Lg==",
+
"achieved_goal": "[[0.4140128 0.00124714 0.5698842 ]\n [0.4140128 0.00124714 0.5698842 ]\n [0.4140128 0.00124714 0.5698842 ]\n [0.4140128 0.00124714 0.5698842 ]]",
+
"desired_goal": "[[-0.994564 -0.01629231 0.8944775 ]\n [ 0.8374649 -0.7448459 0.8700705 ]\n [ 1.3631604 -0.51465607 1.0430675 ]\n [ 1.6818075 -0.4812243 -1.010105 ]]",
+
"observation": "[[ 0.4140128 0.00124714 0.5698842 0.02116437 -0.0027923 0.01664114]\n [ 0.4140128 0.00124714 0.5698842 0.02116437 -0.0027923 0.01664114]\n [ 0.4140128 0.00124714 0.5698842 0.02116437 -0.0027923 0.01664114]\n [ 0.4140128 0.00124714 0.5698842 0.02116437 -0.0027923 0.01664114]]"
},
"_last_episode_starts": {
":type:": "<class 'numpy.ndarray'>",
},
"_last_original_obs": {
":type:": "<class 'collections.OrderedDict'>",
+
":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAADpSBPQZyAz5OA1Q+dbU8PQdjQ73deLg94aKNPbZu6j0hIxA++lEMPc9aX71uMmE+lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==",
"achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]",
+
"desired_goal": "[[ 0.06327067 0.12836465 0.20704386]\n [ 0.04607149 -0.04770186 0.09007428]\n [ 0.06915832 0.11446898 0.140759 ]\n [ 0.03425787 -0.05452996 0.21991894]]",
"observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"
},
"_episode_num": 0,
"_current_progress_remaining": 0.0,
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
+
":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIZyYYzjUsA8CUhpRSlIwBbJRLMowBdJRHQKvBhCa7Vax1fZQoaAZoCWgPQwhaKm9HOC0HwJSGlFKUaBVLMmgWR0CrwUEUKzAvdX2UKGgGaAloD0MInYTSF0LOA8CUhpRSlGgVSzJoFkdAq8D64nWrfnV9lChoBmgJaA9DCD/jwoGQrPK/lIaUUpRoFUsyaBZHQKvAuNtIkJN1fZQoaAZoCWgPQwgUl+MViN7wv5SGlFKUaBVLMmgWR0Crw8CbDuSfdX2UKGgGaAloD0MIiLt6FRmd9b+UhpRSlGgVSzJoFkdAq8N/Lkjop3V9lChoBmgJaA9DCNxlv+50Z/q/lIaUUpRoFUsyaBZHQKvDOL8aXKN1fZQoaAZoCWgPQwhn170ViQkAwJSGlFKUaBVLMmgWR0CrwvbgsK9gdX2UKGgGaAloD0MI+yR32EQmBsCUhpRSlGgVSzJoFkdAq8XovtdAxHV9lChoBmgJaA9DCGzrp/+sefq/lIaUUpRoFUsyaBZHQKvFpb8m8dx1fZQoaAZoCWgPQwigppat9UXYv5SGlFKUaBVLMmgWR0CrxV8sMAmzdX2UKGgGaAloD0MIJCao4VuY/b+UhpRSlGgVSzJoFkdAq8Udg+hXbXV9lChoBmgJaA9DCOF/K9mxkeG/lIaUUpRoFUsyaBZHQKvIMiBXjlx1fZQoaAZoCWgPQwjdXWdD/tnzv5SGlFKUaBVLMmgWR0Crx+8TBZZCdX2UKGgGaAloD0MIt7dbkgM2+r+UhpRSlGgVSzJoFkdAq8eorpaA4HV9lChoBmgJaA9DCKFpiZXRyO2/lIaUUpRoFUsyaBZHQKvHZyOq//N1fZQoaAZoCWgPQwhStkjajb7qv5SGlFKUaBVLMmgWR0CrymH0kGA1dX2UKGgGaAloD0MIsdtnlZmS9r+UhpRSlGgVSzJoFkdAq8oe9WZJCnV9lChoBmgJaA9DCGechqjCH/m/lIaUUpRoFUsyaBZHQKvJ2KKpDNR1fZQoaAZoCWgPQwh0JQLVP4j5v5SGlFKUaBVLMmgWR0CryZa/7BO6dX2UKGgGaAloD0MIQURq2sW05r+UhpRSlGgVSzJoFkdAq8vpTqB3A3V9lChoBmgJaA9DCJPjTulgffa/lIaUUpRoFUsyaBZHQKvLpTxXnyN1fZQoaAZoCWgPQwg6z9iXbLzlv5SGlFKUaBVLMmgWR0Cry13XI2fkdX2UKGgGaAloD0MInGotzEL78r+UhpRSlGgVSzJoFkdAq8sbLSuyNXV9lChoBmgJaA9DCK7TSEvlbeu/lIaUUpRoFUsyaBZHQKvNI5OJtSB1fZQoaAZoCWgPQwhJMNXMWorjv5SGlFKUaBVLMmgWR0CrzN95hSccdX2UKGgGaAloD0MII7pnXaOl/L+UhpRSlGgVSzJoFkdAq8yX1Hvtt3V9lChoBmgJaA9DCPD5YYTwaPu/lIaUUpRoFUsyaBZHQKvMVRx95Qh1fZQoaAZoCWgPQwhVwhN6/cnnv5SGlFKUaBVLMmgWR0CrzlntOVPfdX2UKGgGaAloD0MInBpoPufu+L+UhpRSlGgVSzJoFkdAq84WBQN1AHV9lChoBmgJaA9DCOli00ohEOm/lIaUUpRoFUsyaBZHQKvNzrB0p3J1fZQoaAZoCWgPQwhwfO2ZJQHjv5SGlFKUaBVLMmgWR0CrzYvlMh5gdX2UKGgGaAloD0MIGY9SCU/o97+UhpRSlGgVSzJoFkdAq8+Sa9bosHV9lChoBmgJaA9DCBFwCFVq9ve/lIaUUpRoFUsyaBZHQKvPTm5lOGl1fZQoaAZoCWgPQwinWaDdIUXyv5SGlFKUaBVLMmgWR0Crzwb5Ec81dX2UKGgGaAloD0MI9nr3x3tV7r+UhpRSlGgVSzJoFkdAq87EMd92HXV9lChoBmgJaA9DCFckJqjhW+a/lIaUUpRoFUsyaBZHQKvQxwazeGh1fZQoaAZoCWgPQwgZkL3e/TH+v5SGlFKUaBVLMmgWR0Cr0IMNUfgadX2UKGgGaAloD0MInRIQk3Dh/L+UhpRSlGgVSzJoFkdAq9A7jLjgh3V9lChoBmgJaA9DCMRA176Anvm/lIaUUpRoFUsyaBZHQKvP+Ln9vTB1fZQoaAZoCWgPQwg7cTlegWj5v5SGlFKUaBVLMmgWR0Cr0gVaW5YpdX2UKGgGaAloD0MIkSdJ10y+6r+UhpRSlGgVSzJoFkdAq9HBP0qYq3V9lChoBmgJaA9DCACrI0c6A++/lIaUUpRoFUsyaBZHQKvRean75211fZQoaAZoCWgPQwhLdQEvM+z7v5SGlFKUaBVLMmgWR0Cr0Ta7dznzdX2UKGgGaAloD0MIBRcrajDN9L+UhpRSlGgVSzJoFkdAq9Mvt0FKTXV9lChoBmgJaA9DCIgrZ++MtvC/lIaUUpRoFUsyaBZHQKvS665oXbd1fZQoaAZoCWgPQwj76T9rfjz1v5SGlFKUaBVLMmgWR0Cr0qQfhddFdX2UKGgGaAloD0MIlGx1OSWg7L+UhpRSlGgVSzJoFkdAq9JhyOq//XV9lChoBmgJaA9DCCCZDp2ed9W/lIaUUpRoFUsyaBZHQKvUXTIeYD11fZQoaAZoCWgPQwhBZfz7jIv2v5SGlFKUaBVLMmgWR0Cr1Bkona37dX2UKGgGaAloD0MIsmX5ugz/7b+UhpRSlGgVSzJoFkdAq9PRrP+n63V9lChoBmgJaA9DCMcuUb01cP2/lIaUUpRoFUsyaBZHQKvTjxlQMx51fZQoaAZoCWgPQwgwRiQKLWv4v5SGlFKUaBVLMmgWR0Cr1ZTch1TzdX2UKGgGaAloD0MIPlxy3Cmd97+UhpRSlGgVSzJoFkdAq9VQrOJLunV9lChoBmgJaA9DCAxzgjY5/OC/lIaUUpRoFUsyaBZHQKvVCRFI/aB1fZQoaAZoCWgPQwjO3hltVRLkv5SGlFKUaBVLMmgWR0Cr1MYQrc0tdX2UKGgGaAloD0MIQFBu2/co6r+UhpRSlGgVSzJoFkdAq9bCRbKRuHV9lChoBmgJaA9DCEs/4ezWsum/lIaUUpRoFUsyaBZHQKvWfhttQ9B1fZQoaAZoCWgPQwhRvwtbs5Xxv5SGlFKUaBVLMmgWR0Cr1jaEal1sdX2UKGgGaAloD0MIvFetTPgl77+UhpRSlGgVSzJoFkdAq9XzhBJI2HV9lChoBmgJaA9DCMLB3sSQHO2/lIaUUpRoFUsyaBZHQKvYAOPvKEF1fZQoaAZoCWgPQwiXN4drtQfuv5SGlFKUaBVLMmgWR0Cr17ykbgjydX2UKGgGaAloD0MIPNujN9xH57+UhpRSlGgVSzJoFkdAq9d1AiV0Ln
V9lChoBmgJaA9DCCY3iqw1FOW/lIaUUpRoFUsyaBZHQKvXMh0yP+51fZQoaAZoCWgPQwjpCyHn/X/wv5SGlFKUaBVLMmgWR0Cr2US5qdpZdX2UKGgGaAloD0MI/ilVouyt77+UhpRSlGgVSzJoFkdAq9kAnOSntXV9lChoBmgJaA9DCL+ByY0ia+u/lIaUUpRoFUsyaBZHQKvYuS13MZB1fZQoaAZoCWgPQwjABdmyfF31v5SGlFKUaBVLMmgWR0Cr2HZb6guidX2UKGgGaAloD0MI8iiV8ITe5L+UhpRSlGgVSzJoFkdAq9pxx7zClHV9lChoBmgJaA9DCEs7NZcbzPO/lIaUUpRoFUsyaBZHQKvaLa24NI91fZQoaAZoCWgPQwiuf9dnznryv5SGlFKUaBVLMmgWR0Cr2eYjrzGxdX2UKGgGaAloD0MIr5l8s83N9b+UhpRSlGgVSzJoFkdAq9mjZnL7oHV9lChoBmgJaA9DCGoX00z3uvO/lIaUUpRoFUsyaBZHQKvbqte2NNt1fZQoaAZoCWgPQwiHokCfyFPyv5SGlFKUaBVLMmgWR0Cr22aqjrRjdX2UKGgGaAloD0MI51PHKqXn/7+UhpRSlGgVSzJoFkdAq9sfiBGx2XV9lChoBmgJaA9DCFiR0QFJWO+/lIaUUpRoFUsyaBZHQKva3UNrj5t1fZQoaAZoCWgPQwiOQLyuX/Dwv5SGlFKUaBVLMmgWR0Cr3OMfq5bydX2UKGgGaAloD0MI+BqC4zIu9L+UhpRSlGgVSzJoFkdAq9yfLcKw6nV9lChoBmgJaA9DCPYINUOqKNK/lIaUUpRoFUsyaBZHQKvcV9Nvfj11fZQoaAZoCWgPQwh9zt2ulyb6v5SGlFKUaBVLMmgWR0Cr3BUhV2iddX2UKGgGaAloD0MIwCUA/5Sq77+UhpRSlGgVSzJoFkdAq94mP/7zkXV9lChoBmgJaA9DCDtwzojSXuq/lIaUUpRoFUsyaBZHQKvd4jdHlOp1fZQoaAZoCWgPQwgvbM1WXvLmv5SGlFKUaBVLMmgWR0Cr3ZrYoRZmdX2UKGgGaAloD0MI7rPKTGm9/7+UhpRSlGgVSzJoFkdAq91YHC4z8HV9lChoBmgJaA9DCCAL0SFw5Pi/lIaUUpRoFUsyaBZHQKvfiLaVUuN1fZQoaAZoCWgPQwgnhuRk4tbsv5SGlFKUaBVLMmgWR0Cr30W7OE/TdX2UKGgGaAloD0MIFNGvrZ/+27+UhpRSlGgVSzJoFkdAq97+8Empl3V9lChoBmgJaA9DCGZK628JgPa/lIaUUpRoFUsyaBZHQKvevGJemel1fZQoaAZoCWgPQwgL0oxF09nlv5SGlFKUaBVLMmgWR0Cr4Zsbm2b5dX2UKGgGaAloD0MIXFX2XRF89L+UhpRSlGgVSzJoFkdAq+FYKKHfuXV9lChoBmgJaA9DCAXhCijUk/G/lIaUUpRoFUsyaBZHQKvhEeNDMNd1fZQoaAZoCWgPQwhw0clS6z34v5SGlFKUaBVLMmgWR0Cr4NA6EJ0GdX2UKGgGaAloD0MI/IwLB0Ky5b+UhpRSlGgVSzJoFkdAq+OqTSsr/nV9lChoBmgJaA9DCOnUlc/yPO+/lIaUUpRoFUsyaBZHQKvjaLux8lZ1fZQoaAZoCWgPQwhLyAc9m1Xzv5SGlFKUaBVLMmgWR0Cr4yIgmqo7dX2UKGgGaAloD0MIPs3Ji0zAAMCUhpRSlGgVSzJoFkdAq+LgrhBJI3V9lChoBmgJaA9DCG/x8J4DS+e/lIaUUpRoFUsyaBZHQKvlyzHCGet1fZQoaAZoCWgPQwhDjq1nCEf4v5SGlFKUaBVLMmgWR0Cr5YhGx2SudX2UKGgGaAloD0MIDYy8rImF8b+UhpRSlGgVSzJoFkdAq+VBfQa73HV9lChoBmgJaA9DCGcsms5ORvG/lIaUUpRoFUsyaBZHQKvk/9tuUEB1ZS4="
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",
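a2c-PandaReachDense-v2/data is the JSON metadata member of the Stable-Baselines3 model archive (stored alongside policy.pth and policy.optimizer.pth inside the .zip); the changed lines are run-specific values such as in-memory function/object addresses, start_time, the last observed states, and the episode-info buffer. To inspect it locally, one option (a sketch, assuming the .zip from this repo has been downloaded to the working directory; key names beyond those shown in the diff are assumed from config.json) is:

```python
import json
import zipfile

# A Stable-Baselines3 .zip archive keeps its metadata in a JSON member named "data".
with zipfile.ZipFile("a2c-PandaReachDense-v2.zip") as archive:
    data = json.loads(archive.read("data"))

print(data["learning_rate"], data["n_steps"], data["gamma"])
```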
a2c-PandaReachDense-v2/policy.optimizer.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:d5c13274e0f9c6f97c6d07d26fe15530f4fa1bce0b388ca41679392765c3b729
 size 44734
a2c-PandaReachDense-v2/policy.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:ff3cb85a58a3643afea1ee7e15d1abd57a19eefccfb10fcee103cda6be1d521f
 size 46014
config.json
CHANGED
@@ -1 +1 @@
-
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=", "__module__": "stable_baselines3.common.policies", "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7fbbb9d07040>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc_data object at 0x7fbbb9d7fb10>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVgQAAAAAAAAB9lCiMD29wdGltaXplcl9jbGFzc5SME3RvcmNoLm9wdGltLnJtc3Byb3CUjAdSTVNwcm9wlJOUjBBvcHRpbWl6ZXJfa3dhcmdzlH2UKIwFYWxwaGGURz/vrhR64UeujANlcHOURz7k+LWI42jxjAx3ZWlnaHRfZGVjYXmUSwB1dS4=", "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "observation_space": {":type:": "<class 'gym.spaces.dict.Dict'>", ":serialized:": 
"gAWVUgMAAAAAAACMD2d5bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwOZ3ltLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUaBCTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowGX3NoYXBllEsDhZSMA2xvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZSMAUOUdJRSlIwEaGlnaJRoHSiWDAAAAAAAAAAAACBBAAAgQQAAIEGUaBVLA4WUaCB0lFKUjA1ib3VuZGVkX2JlbG93lGgdKJYDAAAAAAAAAAEBAZRoEowCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZRoIHSUUpSMDWJvdW5kZWRfYWJvdmWUaB0olgMAAAAAAAAAAQEBlGgsSwOFlGggdJRSlIwKX25wX3JhbmRvbZROdWKMDGRlc2lyZWRfZ29hbJRoDSmBlH2UKGgQaBVoGEsDhZRoGmgdKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZRoIHSUUpRoI2gdKJYMAAAAAAAAAAAAIEEAACBBAAAgQZRoFUsDhZRoIHSUUpRoKGgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoMmgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoN051YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgVaBhLBoWUaBpoHSiWGAAAAAAAAAAAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBVLBoWUaCB0lFKUaCNoHSiWGAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEGUaBVLBoWUaCB0lFKUaChoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDJoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDdOdWJ1aBhOaBBOaDdOdWIu", "spaces": "OrderedDict([('achieved_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('desired_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('observation', Box([-10. -10. -10. -10. -10. -10.], [10. 10. 10. 10. 10. 10.], (6,), float32))])", "_shape": null, "dtype": null, "_np_random": null}, "action_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": "gAWVbQEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLA4WUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWDAAAAAAAAAAAAIC/AACAvwAAgL+UaApLA4WUjAFDlHSUUpSMBGhpZ2iUaBIolgwAAAAAAAAAAACAPwAAgD8AAIA/lGgKSwOFlGgVdJRSlIwNYm91bmRlZF9iZWxvd5RoEiiWAwAAAAAAAAABAQGUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLA4WUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYDAAAAAAAAAAEBAZRoIUsDhZRoFXSUUpSMCl9ucF9yYW5kb22UTnViLg==", "dtype": "float32", "_shape": [3], "low": "[-1. -1. -1.]", "high": "[1. 1. 
1.]", "bounded_below": "[ True True True]", "bounded_above": "[ True True True]", "_np_random": null}, "n_envs": 4, "num_timesteps": 1000000, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1677558034116402596, "learning_rate": 0.0007, "tensorboard_log": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOC9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOC9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/RvAGjbi6x4WUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="}, "_last_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAttBlPlQBEj76wBY9ttBlPlQBEj76wBY9ttBlPlQBEj76wBY9ttBlPlQBEj76wBY9lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAA74eZPil8kz/ii4m/RvCLv1uBLj9gHUQ++j9Cv4wDlL5eLAY/mz5eP31BKz8LIdC/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAAC20GU+VAESPvrAFj2uE449wUJJOxCBN7y20GU+VAESPvrAFj2uE449wUJJOxCBN7y20GU+VAESPvrAFj2uE449wUJJOxCBN7y20GU+VAESPvrAFj2uE449wUJJOxCBN7yUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[0.22442898 0.14258319 0.03680513]\n [0.22442898 0.14258319 0.03680513]\n [0.22442898 0.14258319 0.03680513]\n [0.22442898 0.14258319 0.03680513]]", "desired_goal": "[[ 0.29986522 1.1522266 -1.0745814 ]\n [-1.0932701 0.6816613 0.1915183 ]\n [-0.7587887 -0.28908956 0.5241145 ]\n [ 0.8681428 0.668968 -1.6260084 ]]", "observation": "[[ 0.22442898 0.14258319 0.03680513 0.06937347 0.003071 -0.0112002 ]\n [ 0.22442898 0.14258319 0.03680513 0.06937347 0.003071 -0.0112002 ]\n [ 0.22442898 0.14258319 0.03680513 0.06937347 0.003071 -0.0112002 ]\n [ 0.22442898 0.14258319 0.03680513 0.06937347 0.003071 -0.0112002 ]]"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAEBAQGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": 
"gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAAFwIPRqqC732X1k+bCUGPhnV+L0x7Xo+R+8Avqkhxj1FUwM+5UyuPf6naL0GzlA9lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]", "desired_goal": "[[ 0.03329086 -0.03409777 0.21228012]\n [ 0.13100213 -0.1215002 0.24504544]\n [-0.12591277 0.09674389 0.12824734]\n [ 0.0851076 -0.05680083 0.05097773]]", "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"}, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIis3HtaHiPMCUhpRSlIwBbJRLMowBdJRHQKn+yXfIjnp1fZQoaAZoCWgPQwhpUgq6vVQowJSGlFKUaBVLMmgWR0Cp/oxDTjNqdX2UKGgGaAloD0MIL2zNVl4+MMCUhpRSlGgVSzJoFkdAqf5NvhqCYnV9lChoBmgJaA9DCAu2EU92qyzAlIaUUpRoFUsyaBZHQKn+DI8QqZt1fZQoaAZoCWgPQwgGnKVkOZkawJSGlFKUaBVLMmgWR0CqAFCHARChdX2UKGgGaAloD0MIKEUr9wIDH8CUhpRSlGgVSzJoFkdAqgATTYukDnV9lChoBmgJaA9DCPjhICHKXynAlIaUUpRoFUsyaBZHQKn/1YW+GoJ1fZQoaAZoCWgPQwheDybFx8cjwJSGlFKUaBVLMmgWR0Cp/5QcxTKldX2UKGgGaAloD0MIBHY1eco+McCUhpRSlGgVSzJoFkdAqgIRf+jubHV9lChoBmgJaA9DCIbLKmwGuC3AlIaUUpRoFUsyaBZHQKoB1Jsfq5d1fZQoaAZoCWgPQwheZtgo6682wJSGlFKUaBVLMmgWR0CqAZf4IrvtdX2UKGgGaAloD0MInlxTILPzJMCUhpRSlGgVSzJoFkdAqgFWlfqoqHV9lChoBmgJaA9DCE8GR8mrGy3AlIaUUpRoFUsyaBZHQKoEInUDuBt1fZQoaAZoCWgPQwgke4SaIdUowJSGlFKUaBVLMmgWR0CqA+VObiIddX2UKGgGaAloD0MIXg8mxcfPMcCUhpRSlGgVSzJoFkdAqgOobKifx3V9lChoBmgJaA9DCH3MBwQ6SznAlIaUUpRoFUsyaBZHQKoDaXP7el91fZQoaAZoCWgPQwhrYKsEi3MwwJSGlFKUaBVLMmgWR0CqBgF/pdKNdX2UKGgGaAloD0MIh/wzg/gQLcCUhpRSlGgVSzJoFkdAqgXEH0K7ZnV9lChoBmgJaA9DCHSV7q6zcTDAlIaUUpRoFUsyaBZHQKoFhu5SWJJ1fZQoaAZoCWgPQwjS/DGtTV8xwJSGlFKUaBVLMmgWR0CqBUTpxFRYdX2UKGgGaAloD0MIHERrRZsjH8CUhpRSlGgVSzJoFkdAqgc8B0ZFX3V9lChoBmgJaA9DCCxHyECe7S/AlIaUUpRoFUsyaBZHQKoG/uZThpB1fZQoaAZoCWgPQwh39SoyOuAmwJSGlFKUaBVLMmgWR0CqBr+/xlQNdX2UKGgGaAloD0MI5BQdyeVnOcCUhpRSlGgVSzJoFkdAqgZ+qPwNLHV9lChoBmgJaA9DCMOC+wEPpDHAlIaUUpRoFUsyaBZHQKoIfFLnLaF1fZQoaAZoCWgPQwjMuKmB5iMuwJSGlFKUaBVLMmgWR0CqCD52hZhbdX2UKGgGaAloD0MIRYKpZtauNMCUhpRSlGgVSzJoFkdAqggAQcxTKnV9lChoBmgJaA9DCF97ZkmA+iXAlIaUUpRoFUsyaBZHQKoHve9Ba9t1fZQoaAZoCWgPQwg4onvWNQIzwJSGlFKUaBVLMmgWR0CqCZByKekIdX2UKGgGaAloD0MISBYwgVsrNsCUhpRSlGgVSzJoFkdAqglTmEGqxXV9lChoBmgJaA9DCJ0std5vNCnAlIaUUpRoFUsyaBZHQKoJFISUTtd1fZQoaAZoCWgPQwgcXhCRmg4owJSGlFKUaBVLMmgWR0CqCNJP69CedX2UKGgGaAloD0MIshNeglPXOsCUhpRSlGgVSzJoFkdAqgrrTlT3qXV9lChoBmgJaA9DCM1WXvI/QSvAlIaUUpRoFUsyaBZHQKoKrVktmL91fZQ
oaAZoCWgPQwjrjzAMWIo7wJSGlFKUaBVLMmgWR0CqCm9RJmNBdX2UKGgGaAloD0MIaAQb17+LL8CUhpRSlGgVSzJoFkdAqgot6HCXQnV9lChoBmgJaA9DCJNUppiD+DLAlIaUUpRoFUsyaBZHQKoMDk5IYm91fZQoaAZoCWgPQwg1JVmHo5suwJSGlFKUaBVLMmgWR0CqC9B7VrhzdX2UKGgGaAloD0MIKc3mcRisH8CUhpRSlGgVSzJoFkdAqguRVKf4AXV9lChoBmgJaA9DCGu3XWiumzfAlIaUUpRoFUsyaBZHQKoLUBtDUmV1fZQoaAZoCWgPQwgR/dr66UszwJSGlFKUaBVLMmgWR0CqDR0YbbUPdX2UKGgGaAloD0MIecn/5O9eM8CUhpRSlGgVSzJoFkdAqgzgJ5VwP3V9lChoBmgJaA9DCBGq1OyBJiLAlIaUUpRoFUsyaBZHQKoMoQkHD791fZQoaAZoCWgPQwh1OSUgJtEmwJSGlFKUaBVLMmgWR0CqDF7D/EOzdX2UKGgGaAloD0MIiQyreCOTIMCUhpRSlGgVSzJoFkdAqg40/wAlwHV9lChoBmgJaA9DCDBLOzWXexrAlIaUUpRoFUsyaBZHQKoN9xEORT11fZQoaAZoCWgPQwjRXKeRltIjwJSGlFKUaBVLMmgWR0CqDbgHu7YkdX2UKGgGaAloD0MI6nqi68IHNsCUhpRSlGgVSzJoFkdAqg13OMVDbHV9lChoBmgJaA9DCAIpsWt7+zLAlIaUUpRoFUsyaBZHQKoPe1Gb1AZ1fZQoaAZoCWgPQwgKaY1BJ2w4wJSGlFKUaBVLMmgWR0CqDz1fVqetdX2UKGgGaAloD0MIc4V3uYgHJsCUhpRSlGgVSzJoFkdAqg7+Q6p5vHV9lChoBmgJaA9DCA3k2eVbTzHAlIaUUpRoFUsyaBZHQKoOvR/mT1V1fZQoaAZoCWgPQwjT2cngKJkawJSGlFKUaBVLMmgWR0CqEKBxo7FLdX2UKGgGaAloD0MIAiuHFtlSNcCUhpRSlGgVSzJoFkdAqhBkclw97nV9lChoBmgJaA9DCC0j9Z7KiRHAlIaUUpRoFUsyaBZHQKoQJVhkRSR1fZQoaAZoCWgPQwhXeQJhp6hDwJSGlFKUaBVLMmgWR0CqD+N2s7uEdX2UKGgGaAloD0MIoBuastM3IcCUhpRSlGgVSzJoFkdAqhGdLi++NHV9lChoBmgJaA9DCOF5qdiYFyPAlIaUUpRoFUsyaBZHQKoRXyxRl6J1fZQoaAZoCWgPQwjGGFjH8bsowJSGlFKUaBVLMmgWR0CqER/KZDzAdX2UKGgGaAloD0MIeXb51ocdLcCUhpRSlGgVSzJoFkdAqhDeQhfShXV9lChoBmgJaA9DCI178xsmujDAlIaUUpRoFUsyaBZHQKoS9qmCROl1fZQoaAZoCWgPQwivl6YIcGIzwJSGlFKUaBVLMmgWR0CqEro3Jgb7dX2UKGgGaAloD0MIwM+4cCB0JcCUhpRSlGgVSzJoFkdAqhJ7GWD6FnV9lChoBmgJaA9DCENZ+PpakzLAlIaUUpRoFUsyaBZHQKoSOg2606Z1fZQoaAZoCWgPQwhoklhS7hYpwJSGlFKUaBVLMmgWR0CqFCdBBzFNdX2UKGgGaAloD0MIPsqIC0BLMsCUhpRSlGgVSzJoFkdAqhPqjpLVWnV9lChoBmgJaA9DCCaMZmX7fDLAlIaUUpRoFUsyaBZHQKoTqzsQd0d1fZQoaAZoCWgPQwhl48EWu7E3wJSGlFKUaBVLMmgWR0CqE2nNxEORdX2UKGgGaAloD0MIyLYMOEsxJ8CUhpRSlGgVSzJoFkdAqhVcfvF3p3V9lChoBmgJaA9DCF6hD5axJTvAlIaUUpRoFUsyaBZHQKoVH1GLDQ91fZQoaAZoCWgPQwjRPIBFfgEywJSGlFKUaBVLMmgWR0CqFOD0UXYUdX2UKGgGaAloD0MI31M57SlpKcCUhpRSlGgVSzJoFkdAqhSeys0YTHV9lChoBmgJaA9DCHaJ6q2BTS/AlIaUUpRoFUsyaBZHQKoWWVSGahJ1fZQoaAZoCWgPQwg5s12hD04uwJSGlFKUaBVLMmgWR0CqFhtVBD5TdX2UKGgGaAloD0MIQX42ct3ULcCUhpRSlGgVSzJoFkdAqhXdLxqfvnV9lChoBmgJaA9DCIJvmj47sCXAlIaUUpRoFUsyaBZHQKoVmvM8ox51fZQoaAZoCWgPQwjvxoLCoNwnwJSGlFKUaBVLMmgWR0CqF1WIXTEzdX2UKGgGaAloD0MIrpy9M9raNsCUhpRSlGgVSzJoFkdAqhcXuw5eaHV9lChoBmgJaA9DCEbRAx+DNTXAlIaUUpRoFUsyaBZHQKoW2MQ2/BZ1fZQoaAZoCWgPQwiNR6mEJ8QpwJSGlFKUaBVLMmgWR0CqFpaGpMpPdX2UKGgGaAloD0MITrfsEP88MsCUhpRSlGgVSzJoFkdAqhhXnfVI7XV9lChoBmgJaA9DCNgubTgsNSXAlIaUUpRoFUsyaBZHQKoYGbm2b5N1fZQoaAZoCWgPQwgbYrzmVZEywJSGlFKUaBVLMmgWR0CqF9qUVzp5dX2UKGgGaAloD0MIEtkHWRY0MsCUhpRSlGgVSzJoFkdAqheYnhKlHnV9lChoBmgJaA9DCLuAlxk2yiDAlIaUUpRoFUsyaBZHQKoZb2mHgxd1fZQoaAZoCWgPQwg26iEa3WkqwJSGlFKUaBVLMmgWR0CqGTJk5IYndX2UKGgGaAloD0MIsrj/yHSAJMCUhpRSlGgVSzJoFkdAqhjzT4L1EnV9lChoBmgJaA9DCGQfZFkw3TTAlIaUUpRoFUsyaBZHQKoYsOz6ab51fZQoaAZoCWgPQwjO/GoOENw4wJSGlFKUaBVLMmgWR0CqGvO/cnE3dX2UKGgGaAloD0MIkC+hgsOjJsCUhpRSlGgVSzJoFkdAqhq2XgLqlnV9lChoBmgJaA9DCMmvH2KDTSjAlIaUUpRoFUsyaBZHQKoad9ORDCx1fZQoaAZoCWgPQwiCrn0BveAcwJSGlFKUaBVLMmgWR0CqGjY/3WWhdX2UKGgGaAloD0MI32xzY3ruMMCUhpRSlGgVSzJoFkdAqhy2+wkgOnV9lChoBmgJaA9DCCFZwARuTRzAlIaUUpRoFUsyaBZHQKocefqX4TN1fZQoaAZoCWgPQwgSwM3ixeojwJSGlFKUaBVLMmgWR0CqHDvDP4VRdX2UKGgGaAloD0MIz02bcRoiFcCUhpRSlGgVSzJoFkdAqhv6O7xusXV9lChoBmgJaA9DCA3GiEShNS7AlIaUUpRoFUsyaBZHQKoeWGJN0vJ1fZQoaAZoCWgPQwghc2VQbZAfwJSGlFKUaBVLMmgWR0CqHhscZLqVdX2UKGgGaAloD0MI8L4qFypPJ8CUhpRSlGgVSzJoFkdAqh3clLOAy3V9lChoBmgJaA9DCI0o7Q2+SCTAlIaUUpRoFUsyaBZHQKodmtlqagF1ZS4="}, "ep_success_buffer": 
{":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 50000, "n_steps": 5, "gamma": 0.99, "gae_lambda": 1.0, "ent_coef": 0.0, "vf_coef": 0.5, "max_grad_norm": 0.5, "normalize_advantage": false, "system_info": {"OS": "Linux-5.10.147+-x86_64-with-glibc2.29 # 1 SMP Sat Dec 10 16:00:40 UTC 2022", "Python": "3.8.10", "Stable-Baselines3": "1.7.0", "PyTorch": "1.13.1+cu116", "GPU Enabled": "True", "Numpy": "1.22.4", "Gym": "0.21.0"}}
+
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=", "__module__": "stable_baselines3.common.policies", "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7fcb9295b3a0>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc_data object at 0x7fcb92952a50>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVgQAAAAAAAAB9lCiMD29wdGltaXplcl9jbGFzc5SME3RvcmNoLm9wdGltLnJtc3Byb3CUjAdSTVNwcm9wlJOUjBBvcHRpbWl6ZXJfa3dhcmdzlH2UKIwFYWxwaGGURz/vrhR64UeujANlcHOURz7k+LWI42jxjAx3ZWlnaHRfZGVjYXmUSwB1dS4=", "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "observation_space": {":type:": "<class 'gym.spaces.dict.Dict'>", ":serialized:": 
"gAWVUgMAAAAAAACMD2d5bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwOZ3ltLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUaBCTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowGX3NoYXBllEsDhZSMA2xvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZSMAUOUdJRSlIwEaGlnaJRoHSiWDAAAAAAAAAAAACBBAAAgQQAAIEGUaBVLA4WUaCB0lFKUjA1ib3VuZGVkX2JlbG93lGgdKJYDAAAAAAAAAAEBAZRoEowCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZRoIHSUUpSMDWJvdW5kZWRfYWJvdmWUaB0olgMAAAAAAAAAAQEBlGgsSwOFlGggdJRSlIwKX25wX3JhbmRvbZROdWKMDGRlc2lyZWRfZ29hbJRoDSmBlH2UKGgQaBVoGEsDhZRoGmgdKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZRoIHSUUpRoI2gdKJYMAAAAAAAAAAAAIEEAACBBAAAgQZRoFUsDhZRoIHSUUpRoKGgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoMmgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoN051YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgVaBhLBoWUaBpoHSiWGAAAAAAAAAAAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBVLBoWUaCB0lFKUaCNoHSiWGAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEGUaBVLBoWUaCB0lFKUaChoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDJoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDdOdWJ1aBhOaBBOaDdOdWIu", "spaces": "OrderedDict([('achieved_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('desired_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('observation', Box([-10. -10. -10. -10. -10. -10.], [10. 10. 10. 10. 10. 10.], (6,), float32))])", "_shape": null, "dtype": null, "_np_random": null}, "action_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": "gAWVbQEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLA4WUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWDAAAAAAAAAAAAIC/AACAvwAAgL+UaApLA4WUjAFDlHSUUpSMBGhpZ2iUaBIolgwAAAAAAAAAAACAPwAAgD8AAIA/lGgKSwOFlGgVdJRSlIwNYm91bmRlZF9iZWxvd5RoEiiWAwAAAAAAAAABAQGUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLA4WUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYDAAAAAAAAAAEBAZRoIUsDhZRoFXSUUpSMCl9ucF9yYW5kb22UTnViLg==", "dtype": "float32", "_shape": [3], "low": "[-1. -1. -1.]", "high": "[1. 1. 
1.]", "bounded_below": "[ True True True]", "bounded_above": "[ True True True]", "_np_random": null}, "n_envs": 4, "num_timesteps": 1000000, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1677961605429071276, "learning_rate": 0.0007, "tensorboard_log": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOC9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOC9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/RvAGjbi6x4WUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="}, "_last_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAfPnTPj13ozru4xE/fPnTPj13ozru4xE/fPnTPj13ozru4xE/fPnTPj13ozru4xE/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAv5t+v3N3hbx6/GQ/GmRWPzmuPr/xvF4/CnyuP4DAA788g4U/eEXXPwhj9r4fS4G/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAAB8+dM+PXejOu7jET/mYK08Ef82uwFTiDx8+dM+PXejOu7jET/mYK08Ef82uwFTiDx8+dM+PXejOu7jET/mYK08Ef82uwFTiDx8+dM+PXejOu7jET/mYK08Ef82uwFTiDyUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[0.4140128 0.00124714 0.5698842 ]\n [0.4140128 0.00124714 0.5698842 ]\n [0.4140128 0.00124714 0.5698842 ]\n [0.4140128 0.00124714 0.5698842 ]]", "desired_goal": "[[-0.994564 -0.01629231 0.8944775 ]\n [ 0.8374649 -0.7448459 0.8700705 ]\n [ 1.3631604 -0.51465607 1.0430675 ]\n [ 1.6818075 -0.4812243 -1.010105 ]]", "observation": "[[ 0.4140128 0.00124714 0.5698842 0.02116437 -0.0027923 0.01664114]\n [ 0.4140128 0.00124714 0.5698842 0.02116437 -0.0027923 0.01664114]\n [ 0.4140128 0.00124714 0.5698842 0.02116437 -0.0027923 0.01664114]\n [ 0.4140128 0.00124714 0.5698842 0.02116437 -0.0027923 0.01664114]]"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAEBAQGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": 
"gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAADpSBPQZyAz5OA1Q+dbU8PQdjQ73deLg94aKNPbZu6j0hIxA++lEMPc9aX71uMmE+lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]", "desired_goal": "[[ 0.06327067 0.12836465 0.20704386]\n [ 0.04607149 -0.04770186 0.09007428]\n [ 0.06915832 0.11446898 0.140759 ]\n [ 0.03425787 -0.05452996 0.21991894]]", "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"}, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIZyYYzjUsA8CUhpRSlIwBbJRLMowBdJRHQKvBhCa7Vax1fZQoaAZoCWgPQwhaKm9HOC0HwJSGlFKUaBVLMmgWR0CrwUEUKzAvdX2UKGgGaAloD0MInYTSF0LOA8CUhpRSlGgVSzJoFkdAq8D64nWrfnV9lChoBmgJaA9DCD/jwoGQrPK/lIaUUpRoFUsyaBZHQKvAuNtIkJN1fZQoaAZoCWgPQwgUl+MViN7wv5SGlFKUaBVLMmgWR0Crw8CbDuSfdX2UKGgGaAloD0MIiLt6FRmd9b+UhpRSlGgVSzJoFkdAq8N/Lkjop3V9lChoBmgJaA9DCNxlv+50Z/q/lIaUUpRoFUsyaBZHQKvDOL8aXKN1fZQoaAZoCWgPQwhn170ViQkAwJSGlFKUaBVLMmgWR0CrwvbgsK9gdX2UKGgGaAloD0MI+yR32EQmBsCUhpRSlGgVSzJoFkdAq8XovtdAxHV9lChoBmgJaA9DCGzrp/+sefq/lIaUUpRoFUsyaBZHQKvFpb8m8dx1fZQoaAZoCWgPQwigppat9UXYv5SGlFKUaBVLMmgWR0CrxV8sMAmzdX2UKGgGaAloD0MIJCao4VuY/b+UhpRSlGgVSzJoFkdAq8Udg+hXbXV9lChoBmgJaA9DCOF/K9mxkeG/lIaUUpRoFUsyaBZHQKvIMiBXjlx1fZQoaAZoCWgPQwjdXWdD/tnzv5SGlFKUaBVLMmgWR0Crx+8TBZZCdX2UKGgGaAloD0MIt7dbkgM2+r+UhpRSlGgVSzJoFkdAq8eorpaA4HV9lChoBmgJaA9DCKFpiZXRyO2/lIaUUpRoFUsyaBZHQKvHZyOq//N1fZQoaAZoCWgPQwhStkjajb7qv5SGlFKUaBVLMmgWR0CrymH0kGA1dX2UKGgGaAloD0MIsdtnlZmS9r+UhpRSlGgVSzJoFkdAq8oe9WZJCnV9lChoBmgJaA9DCGechqjCH/m/lIaUUpRoFUsyaBZHQKvJ2KKpDNR1fZQoaAZoCWgPQwh0JQLVP4j5v5SGlFKUaBVLMmgWR0CryZa/7BO6dX2UKGgGaAloD0MIQURq2sW05r+UhpRSlGgVSzJoFkdAq8vpTqB3A3V9lChoBmgJaA9DCJPjTulgffa/lIaUUpRoFUsyaBZHQKvLpTxXnyN1fZQoaAZoCWgPQwg6z9iXbLzlv5SGlFKUaBVLMmgWR0Cry13XI2fkdX2UKGgGaAloD0MInGotzEL78r+UhpRSlGgVSzJoFkdAq8sbLSuyNXV9lChoBmgJaA9DCK7TSEvlbeu/lIaUUpRoFUsyaBZHQKvNI5OJtSB1fZQoaAZoCWgPQwhJMNXMWorjv5SGlFKUaBVLMmgWR0CrzN95hSccdX2UKGgGaAloD0MII7pnXaOl/L+UhpRSlGgVSzJoFkdAq8yX1Hvtt3V9lChoBmgJaA9DCPD5YYTwaPu/lIaUUpRoFUsyaBZHQKvMVRx95Qh1fZQoaAZoCWgPQwhVwhN6/cnnv5SGlFKUaBVLMmgWR0CrzlntOVPfdX2UKGgGaAloD0MInBpoPufu+L+UhpRSlGgVSzJoFkdAq84WBQN1AHV9lChoBmgJaA9DCOli00ohEOm/lIaUUpRoFUsyaBZHQKvNzrB0p3J1fZQoaAZoCWgPQwhwfO2ZJQHjv5SGlFKUaBVLMmgWR0CrzYvlMh5gdX2UKGgGaAloD0MIGY9SCU/o97+UhpRSlGgVSzJoFkdAq8+Sa9bosHV9lChoBmgJaA9DCBFwCFVq9ve/lIaUUpRoFUsyaBZHQKvPTm5lOGl1fZQ
oaAZoCWgPQwinWaDdIUXyv5SGlFKUaBVLMmgWR0Crzwb5Ec81dX2UKGgGaAloD0MI9nr3x3tV7r+UhpRSlGgVSzJoFkdAq87EMd92HXV9lChoBmgJaA9DCFckJqjhW+a/lIaUUpRoFUsyaBZHQKvQxwazeGh1fZQoaAZoCWgPQwgZkL3e/TH+v5SGlFKUaBVLMmgWR0Cr0IMNUfgadX2UKGgGaAloD0MInRIQk3Dh/L+UhpRSlGgVSzJoFkdAq9A7jLjgh3V9lChoBmgJaA9DCMRA176Anvm/lIaUUpRoFUsyaBZHQKvP+Ln9vTB1fZQoaAZoCWgPQwg7cTlegWj5v5SGlFKUaBVLMmgWR0Cr0gVaW5YpdX2UKGgGaAloD0MIkSdJ10y+6r+UhpRSlGgVSzJoFkdAq9HBP0qYq3V9lChoBmgJaA9DCACrI0c6A++/lIaUUpRoFUsyaBZHQKvRean75211fZQoaAZoCWgPQwhLdQEvM+z7v5SGlFKUaBVLMmgWR0Cr0Ta7dznzdX2UKGgGaAloD0MIBRcrajDN9L+UhpRSlGgVSzJoFkdAq9Mvt0FKTXV9lChoBmgJaA9DCIgrZ++MtvC/lIaUUpRoFUsyaBZHQKvS665oXbd1fZQoaAZoCWgPQwj76T9rfjz1v5SGlFKUaBVLMmgWR0Cr0qQfhddFdX2UKGgGaAloD0MIlGx1OSWg7L+UhpRSlGgVSzJoFkdAq9JhyOq//XV9lChoBmgJaA9DCCCZDp2ed9W/lIaUUpRoFUsyaBZHQKvUXTIeYD11fZQoaAZoCWgPQwhBZfz7jIv2v5SGlFKUaBVLMmgWR0Cr1Bkona37dX2UKGgGaAloD0MIsmX5ugz/7b+UhpRSlGgVSzJoFkdAq9PRrP+n63V9lChoBmgJaA9DCMcuUb01cP2/lIaUUpRoFUsyaBZHQKvTjxlQMx51fZQoaAZoCWgPQwgwRiQKLWv4v5SGlFKUaBVLMmgWR0Cr1ZTch1TzdX2UKGgGaAloD0MIPlxy3Cmd97+UhpRSlGgVSzJoFkdAq9VQrOJLunV9lChoBmgJaA9DCAxzgjY5/OC/lIaUUpRoFUsyaBZHQKvVCRFI/aB1fZQoaAZoCWgPQwjO3hltVRLkv5SGlFKUaBVLMmgWR0Cr1MYQrc0tdX2UKGgGaAloD0MIQFBu2/co6r+UhpRSlGgVSzJoFkdAq9bCRbKRuHV9lChoBmgJaA9DCEs/4ezWsum/lIaUUpRoFUsyaBZHQKvWfhttQ9B1fZQoaAZoCWgPQwhRvwtbs5Xxv5SGlFKUaBVLMmgWR0Cr1jaEal1sdX2UKGgGaAloD0MIvFetTPgl77+UhpRSlGgVSzJoFkdAq9XzhBJI2HV9lChoBmgJaA9DCMLB3sSQHO2/lIaUUpRoFUsyaBZHQKvYAOPvKEF1fZQoaAZoCWgPQwiXN4drtQfuv5SGlFKUaBVLMmgWR0Cr17ykbgjydX2UKGgGaAloD0MIPNujN9xH57+UhpRSlGgVSzJoFkdAq9d1AiV0LnV9lChoBmgJaA9DCCY3iqw1FOW/lIaUUpRoFUsyaBZHQKvXMh0yP+51fZQoaAZoCWgPQwjpCyHn/X/wv5SGlFKUaBVLMmgWR0Cr2US5qdpZdX2UKGgGaAloD0MI/ilVouyt77+UhpRSlGgVSzJoFkdAq9kAnOSntXV9lChoBmgJaA9DCL+ByY0ia+u/lIaUUpRoFUsyaBZHQKvYuS13MZB1fZQoaAZoCWgPQwjABdmyfF31v5SGlFKUaBVLMmgWR0Cr2HZb6guidX2UKGgGaAloD0MI8iiV8ITe5L+UhpRSlGgVSzJoFkdAq9pxx7zClHV9lChoBmgJaA9DCEs7NZcbzPO/lIaUUpRoFUsyaBZHQKvaLa24NI91fZQoaAZoCWgPQwiuf9dnznryv5SGlFKUaBVLMmgWR0Cr2eYjrzGxdX2UKGgGaAloD0MIr5l8s83N9b+UhpRSlGgVSzJoFkdAq9mjZnL7oHV9lChoBmgJaA9DCGoX00z3uvO/lIaUUpRoFUsyaBZHQKvbqte2NNt1fZQoaAZoCWgPQwiHokCfyFPyv5SGlFKUaBVLMmgWR0Cr22aqjrRjdX2UKGgGaAloD0MI51PHKqXn/7+UhpRSlGgVSzJoFkdAq9sfiBGx2XV9lChoBmgJaA9DCFiR0QFJWO+/lIaUUpRoFUsyaBZHQKva3UNrj5t1fZQoaAZoCWgPQwiOQLyuX/Dwv5SGlFKUaBVLMmgWR0Cr3OMfq5bydX2UKGgGaAloD0MI+BqC4zIu9L+UhpRSlGgVSzJoFkdAq9yfLcKw6nV9lChoBmgJaA9DCPYINUOqKNK/lIaUUpRoFUsyaBZHQKvcV9Nvfj11fZQoaAZoCWgPQwh9zt2ulyb6v5SGlFKUaBVLMmgWR0Cr3BUhV2iddX2UKGgGaAloD0MIwCUA/5Sq77+UhpRSlGgVSzJoFkdAq94mP/7zkXV9lChoBmgJaA9DCDtwzojSXuq/lIaUUpRoFUsyaBZHQKvd4jdHlOp1fZQoaAZoCWgPQwgvbM1WXvLmv5SGlFKUaBVLMmgWR0Cr3ZrYoRZmdX2UKGgGaAloD0MI7rPKTGm9/7+UhpRSlGgVSzJoFkdAq91YHC4z8HV9lChoBmgJaA9DCCAL0SFw5Pi/lIaUUpRoFUsyaBZHQKvfiLaVUuN1fZQoaAZoCWgPQwgnhuRk4tbsv5SGlFKUaBVLMmgWR0Cr30W7OE/TdX2UKGgGaAloD0MIFNGvrZ/+27+UhpRSlGgVSzJoFkdAq97+8Empl3V9lChoBmgJaA9DCGZK628JgPa/lIaUUpRoFUsyaBZHQKvevGJemel1fZQoaAZoCWgPQwgL0oxF09nlv5SGlFKUaBVLMmgWR0Cr4Zsbm2b5dX2UKGgGaAloD0MIXFX2XRF89L+UhpRSlGgVSzJoFkdAq+FYKKHfuXV9lChoBmgJaA9DCAXhCijUk/G/lIaUUpRoFUsyaBZHQKvhEeNDMNd1fZQoaAZoCWgPQwhw0clS6z34v5SGlFKUaBVLMmgWR0Cr4NA6EJ0GdX2UKGgGaAloD0MI/IwLB0Ky5b+UhpRSlGgVSzJoFkdAq+OqTSsr/nV9lChoBmgJaA9DCOnUlc/yPO+/lIaUUpRoFUsyaBZHQKvjaLux8lZ1fZQoaAZoCWgPQwhLyAc9m1Xzv5SGlFKUaBVLMmgWR0Cr4yIgmqo7dX2UKGgGaAloD0MIPs3Ji0zAAMCUhpRSlGgVSzJoFkdAq+LgrhBJI3V9lChoBmgJaA9DCG/x8J4DS+e/lIaUUpRoFUsyaBZHQKvlyzHCGet1fZQoaAZoCWgPQwhDjq1nCEf4v5SGlFKUaBVLMmgWR0Cr5YhGx2SudX2UKGgGaAloD0MIDYy8rImF8b+UhpRSlGgVSzJoFkdAq+VBfQa73HV9lChoBmgJaA9DCGcsms5ORvG/lIaUUpRoFUsyaBZHQKvk/9tuUEB1ZS4="}, "ep_success_buffer": 
{":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 50000, "n_steps": 5, "gamma": 0.99, "gae_lambda": 1.0, "ent_coef": 0.0, "vf_coef": 0.5, "max_grad_norm": 0.5, "normalize_advantage": false, "system_info": {"OS": "Linux-5.10.147+-x86_64-with-glibc2.29 # 1 SMP Sat Dec 10 16:00:40 UTC 2022", "Python": "3.8.10", "Stable-Baselines3": "1.7.0", "PyTorch": "1.13.1+cu116", "GPU Enabled": "True", "Numpy": "1.22.4", "Gym": "0.21.0"}}
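config.json mirrors the saved model's metadata: MultiInputPolicy on PandaReachDense-v2 with 4 parallel envs, learning_rate 0.0007, n_steps 5, gamma 0.99, gae_lambda 1.0, ent_coef 0.0, vf_coef 0.5, max_grad_norm 0.5, and 1,000,000 timesteps under Stable-Baselines3 1.7.0 / gym 0.21. A training run producing these artifacts would look roughly like the sketch below (a reconstruction from the recorded hyperparameters, not the author's actual script; the VecNormalize options in particular are assumed):

```python
import panda_gym  # registers PandaReachDense-v2 (assumed installed)

from stable_baselines3 import A2C
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.vec_env import VecNormalize

# 4 parallel environments with observation/reward normalization;
# the running statistics are what end up in vec_normalize.pkl.
env = make_vec_env("PandaReachDense-v2", n_envs=4)
env = VecNormalize(env, norm_obs=True, norm_reward=True)

model = A2C(
    "MultiInputPolicy",  # dict observation space -> MultiInputActorCriticPolicy
    env,
    learning_rate=0.0007,
    n_steps=5,
    gamma=0.99,
    gae_lambda=1.0,
    ent_coef=0.0,
    vf_coef=0.5,
    max_grad_norm=0.5,
    verbose=1,
)
model.learn(total_timesteps=1_000_000)

model.save("a2c-PandaReachDense-v2")  # writes the .zip (data, policy.pth, policy.optimizer.pth, ...)
env.save("vec_normalize.pkl")         # writes the normalization statistics
```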
replay.mp4
CHANGED
Binary files a/replay.mp4 and b/replay.mp4 differ
results.json
CHANGED
@@ -1 +1 @@
-{"mean_reward": -
+{"mean_reward": -1.1563337153755129, "std_reward": 0.32619795864085316, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2023-03-04T21:26:15.858757"}
vec_normalize.pkl
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:228463b3ee2e245eb89ddb38e0c44153b9845ebc29b936d9f462bde4894330b7
 size 3056