Commit 900cf84
Parent(s): 6ab1947
Initial commit

Files changed:
- README.md +1 -1
- a2c-PandaReachDense-v2.zip +2 -2
- a2c-PandaReachDense-v2/data +10 -10
- a2c-PandaReachDense-v2/policy.optimizer.pth +1 -1
- a2c-PandaReachDense-v2/policy.pth +1 -1
- config.json +1 -1
- replay.mp4 +0 -0
- results.json +1 -1
- vec_normalize.pkl +1 -1
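For orientation, the sketch below shows one way these artifacts are typically consumed with stable-baselines3 and huggingface_sb3; the repo id is a placeholder, since the commit view does not name the repository.

    # Hedged sketch: fetch the artifacts from this commit's repository and restore the
    # agent locally. The repo id below is a placeholder -- the commit view does not name it.
    from huggingface_sb3 import load_from_hub
    from stable_baselines3 import A2C

    checkpoint = load_from_hub("<user>/a2c-PandaReachDense-v2", "a2c-PandaReachDense-v2.zip")
    stats_path = load_from_hub("<user>/a2c-PandaReachDense-v2", "vec_normalize.pkl")
    model = A2C.load(checkpoint)  # rebuilds the policy from the zipped data + policy.pth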
README.md CHANGED
@@ -16,7 +16,7 @@ model-index:
       type: PandaReachDense-v2
     metrics:
     - type: mean_reward
-      value: -0.
+      value: -0.47 +/- 0.18
       name: mean_reward
       verified: false
 ---
a2c-PandaReachDense-v2.zip CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:f36cd91fae77dca24d8f852bca637fe62b4deefe6c0da26a364c9164995f7a5a
+size 109496
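The three lines above are a Git LFS pointer: the archive itself lives in LFS storage, and the pointer records its SHA-256 digest and size. A quick local check, as a sketch assuming the archive has already been materialized (e.g. via git lfs pull):

    # Hedged sketch: confirm a downloaded LFS object matches its pointer.
    import hashlib, os

    path = "a2c-PandaReachDense-v2.zip"
    digest = hashlib.sha256(open(path, "rb").read()).hexdigest()
    assert digest == "f36cd91fae77dca24d8f852bca637fe62b4deefe6c0da26a364c9164995f7a5a"
    assert os.path.getsize(path) == 109496  # matches the "size" line of the pointer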
a2c-PandaReachDense-v2/data CHANGED
@@ -4,9 +4,9 @@
":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
- "__init__": "<function MultiInputActorCriticPolicy.__init__ at
"__abstractmethods__": "frozenset()",
- "_abc_impl": "<_abc_data object at
},
"verbose": 1,
"policy_kwargs": {
@@ -48,7 +48,7 @@
"_num_timesteps_at_start": 0,
"seed": null,
"action_noise": null,
- "start_time":
"learning_rate": 0.00096,
"tensorboard_log": null,
"lr_schedule": {
@@ -57,10 +57,10 @@
},
"_last_obs": {
":type:": "<class 'collections.OrderedDict'>",
- ":serialized:": "
- "achieved_goal": "[[0.
- "desired_goal": "[[
- "observation": "[[0.
},
"_last_episode_starts": {
":type:": "<class 'numpy.ndarray'>",
@@ -68,9 +68,9 @@
},
"_last_original_obs": {
":type:": "<class 'collections.OrderedDict'>",
-
":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////
"achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]",
- "desired_goal": "[[0.
"observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"
},
"_episode_num": 0,
@@ -79,7 +79,7 @@
"_current_progress_remaining": 0.0,
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
-
":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",

":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
+ "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7fc0d02945e0>",
"__abstractmethods__": "frozenset()",
+ "_abc_impl": "<_abc_data object at 0x7fc0d028cae0>"
},
"verbose": 1,
"policy_kwargs": {

"_num_timesteps_at_start": 0,
"seed": null,
"action_noise": null,
+ "start_time": 1674225358461228826,
"learning_rate": 0.00096,
"tensorboard_log": null,
"lr_schedule": {

},
"_last_obs": {
":type:": "<class 'collections.OrderedDict'>",
+
":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAlXCwPvRTwjz4xhw/lXCwPvRTwjz4xhw/lXCwPvRTwjz4xhw/lXCwPvRTwjz4xhw/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAnyYWP7Apu7+xpEW/1aA1P5W/Ub81LeM+c33APQ4CHL9EVcW/4+7aP/SDNz6FWnE/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAACVcLA+9FPCPPjGHD9NCWw9u8jSOvVEhD2VcLA+9FPCPPjGHD9NCWw9u8jSOvVEhD2VcLA+9FPCPPjGHD9NCWw9u8jSOvVEhD2VcLA+9FPCPPjGHD9NCWw9u8jSOvVEhD2UaA5LBEsGhpRoEnSUUpR1Lg==",
+
"achieved_goal": "[[0.34460893 0.02372167 0.612411 ]\n [0.34460893 0.02372167 0.612411 ]\n [0.34460893 0.02372167 0.612411 ]\n [0.34460893 0.02372167 0.612411 ]]",
+
"desired_goal": "[[ 0.5865268 -1.4622097 -0.77204424]\n [ 0.70948535 -0.81932956 0.44370428]\n [ 0.09398928 -0.60940635 -1.5416646 ]\n [ 1.7104152 0.1792143 0.94278747]]",
+
"observation": "[[0.34460893 0.02372167 0.612411 0.05762606 0.00160816 0.06458465]\n [0.34460893 0.02372167 0.612411 0.05762606 0.00160816 0.06458465]\n [0.34460893 0.02372167 0.612411 0.05762606 0.00160816 0.06458465]\n [0.34460893 0.02372167 0.612411 0.05762606 0.00160816 0.06458465]]"
},
"_last_episode_starts": {
":type:": "<class 'numpy.ndarray'>",

},
"_last_original_obs": {
":type:": "<class 'collections.OrderedDict'>",
+
":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAUOvZu8JVhLxRwks+n1BEvIQ1ED5biSE+Y1eOvarmpz0TkiI+9T6DvdMGYL0rGqQ9lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==",
"achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]",
+
"desired_goal": "[[-0.00665037 -0.01615417 0.19898345]\n [-0.01198211 0.14082915 0.15775053]\n [-0.06950261 0.08198293 0.15876035]\n [-0.06408492 -0.05469401 0.08012804]]",
"observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"
},
"_episode_num": 0,

"_current_progress_remaining": 0.0,
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
+
":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIYmngRzVs4r+UhpRSlIwBbJRLMowBdJRHQKPdACih37l1fZQoaAZoCWgPQwiygXSxaaXav5SGlFKUaBVLMmgWR0Cj3MPcafjCdX2UKGgGaAloD0MIrrzkf/J347+UhpRSlGgVSzJoFkdAo9yG3MINVnV9lChoBmgJaA9DCEEQIEPHDtC/lIaUUpRoFUsyaBZHQKPcTGEPDpF1fZQoaAZoCWgPQwi1+uqqQC3nv5SGlFKUaBVLMmgWR0Cj3nJZwGW2dX2UKGgGaAloD0MI5h99k6bB57+UhpRSlGgVSzJoFkdAo943HBDXv3V9lChoBmgJaA9DCEuxo3GoX+O/lIaUUpRoFUsyaBZHQKPd+zQ/oq11fZQoaAZoCWgPQwid19glqrfMv5SGlFKUaBVLMmgWR0Cj3cG8VYZEdX2UKGgGaAloD0MIuyU5YFeT5b+UhpRSlGgVSzJoFkdAo+ANIZqEe3V9lChoBmgJaA9DCAosgCkDB9a/lIaUUpRoFUsyaBZHQKPf0c3l0YF1fZQoaAZoCWgPQwhAa378pUXjv5SGlFKUaBVLMmgWR0Cj35XPZ7HAdX2UKGgGaAloD0MILudSXFV257+UhpRSlGgVSzJoFkdAo99b39JjD3V9lChoBmgJaA9DCBmuDoC4K+W/lIaUUpRoFUsyaBZHQKPhvWqcVgx1fZQoaAZoCWgPQwiBCkeQSjHnv5SGlFKUaBVLMmgWR0Cj4YKTSsr/dX2UKGgGaAloD0MIKgMHtHQF2b+UhpRSlGgVSzJoFkdAo+FGcc2itnV9lChoBmgJaA9DCDbNO07REeO/lIaUUpRoFUsyaBZHQKPhDOIqLCN1fZQoaAZoCWgPQwgD6zh+qDTfv5SGlFKUaBVLMmgWR0Cj41tW2gFpdX2UKGgGaAloD0MIhSLdzylI47+UhpRSlGgVSzJoFkdAo+Mf13+uNnV9lChoBmgJaA9DCMql8QuvZPG/lIaUUpRoFUsyaBZHQKPi44oZydZ1fZQoaAZoCWgPQwjecYqO5HLjv5SGlFKUaBVLMmgWR0Cj4qnhjvuxdX2UKGgGaAloD0MIQWfSpuoe2b+UhpRSlGgVSzJoFkdAo+UcophF3XV9lChoBmgJaA9DCIem7PSDOuy/lIaUUpRoFUsyaBZHQKPk4Wa+evp1fZQoaAZoCWgPQwi3YRQEjy/1v5SGlFKUaBVLMmgWR0Cj5KWPtD2KdX2UKGgGaAloD0MI+BvtuOF337+UhpRSlGgVSzJoFkdAo+RsGA08/3V9lChoBmgJaA9DCJ4JTRJLyue/lIaUUpRoFUsyaBZHQKPmzZq20At1fZQoaAZoCWgPQwhZiuQrgZTrv5SGlFKUaBVLMmgWR0Cj5pGkvboKdX2UKGgGaAloD0MIqg1ORL825L+UhpRSlGgVSzJoFkdAo+ZU3l0YCXV9lChoBmgJaA9DCC7IluXrMuK/lIaUUpRoFUsyaBZHQKPmGnFYMfB1fZQoaAZoCWgPQwhgrdo1Ia3Wv5SGlFKUaBVLMmgWR0Cj59UAtFrmdX2UKGgGaAloD0MI4uR+h6JA4b+UhpRSlGgVSzJoFkdAo+eY4wRGt3V9lChoBmgJaA9DCPILryR5LuC/lIaUUpRoFUsyaBZHQKPnXC79Q411fZQoaAZoCWgPQwjiVkEMdO3av5SGlFKUaBVLMmgWR0Cj5yG2sq8UdX2UKGgGaAloD0MI1QloImz47b+UhpRSlGgVSzJoFkdAo+jIMrmQsHV9lChoBmgJaA9DCMX+snvysOG/lIaUUpRoFUsyaBZHQKPoi9aEBbR1fZQoaAZoCWgPQwh24JwRpb3Pv5SGlFKUaBVLMmgWR0Cj6E7+T/yYdX2UKGgGaAloD0MIyzDuBtFa2L+UhpRSlGgVSzJoFkdAo+gUlzEJjXV9lChoBmgJaA9DCJ0QOugSDuK/lIaUUpRoFUsyaBZHQKPpxnaFmFt1fZQoaAZoCWgPQwiFtMagE0Lfv5SGlFKUaBVLMmgWR0Cj6YpOvdM1dX2UKGgGaAloD0MIMpI9Qs2Q17+UhpRSlGgVSzJoFkdAo+lNe4TbnHV9lChoBmgJaA9DCC1dwTbiyd+/lIaUUpRoFUsyaBZHQKPpEyoGY8d1fZQoaAZoCWgPQwh8tDhjmBPtv5SGlFKUaBVLMmgWR0Cj6r5e7cwhdX2UKGgGaAloD0MIIo0KnGyD67+UhpRSlGgVSzJoFkdAo+qCQaJhv3V9lChoBmgJaA9DCLjoZKn1fuK/lIaUUpRoFUsyaBZHQKPqRVp9JBh1fZQoaAZoCWgPQwh8DcFxGbfqv5SGlFKUaBVLMmgWR0Cj6grVe8f3dX2UKGgGaAloD0MIWWyTisba6L+UhpRSlGgVSzJoFkdAo+vEJng5znV9lChoBmgJaA9DCNAJoYMu4ei/lIaUUpRoFUsyaBZHQKPriB3A2yd1fZQoaAZoCWgPQwgi/mFLj6bsv5SGlFKUaBVLMmgWR0Cj60twJgLJdX2UKGgGaAloD0MIpKZdTDPd4r+UhpRSlGgVSzJoFkdAo+sRFI/Z/XV9lChoBmgJaA9DCH1AoDNpU9a/lIaUUpRoFUsyaBZHQKPsvigkC3h1fZQoaAZoCWgPQwg7j4r/OyLiv5SGlFKUaBVLMmgWR0Cj7IH6/IsAdX2UKGgGaAloD0MIkrJF0m7067+UhpRSlGgVSzJoFkdAo+xFDv3JxXV9lChoBmgJaA9DCNi4/l2fOdq/lIaUUpRoFUsyaBZHQKPsCq7ROUN1fZQoaAZoCWgPQwhnmNpSB/njv5SGlFKUaBVLMmgWR0Cj7bV9F4LUdX2UKGgGaAloD0MIr7DgfsAD4L+UhpRSlGgVSzJoFkdAo+16NMoMKHV9lChoBmgJaA9DCFt6NNWT+d6/lIaUUpRoFUsyaBZHQKPtPlnyup11fZQoaAZoCWgPQwg4LuOmBhrlv5SGlFKUaBVLMmgWR0Cj7QTodMkAdX2UKGgGaAloD0MI0jk/xXFg7b+UhpRSlGgVSzJoFkdAo+61vAGjbnV9lChoBmgJaA9DCDfdskP8A/C/lIaUUpRoFUsyaBZHQKPueaya/h51fZQoaAZoCWgPQwhxkuaPaW3jv5SGlFKUaBVLMmgWR0Cj7jy+6Ae8dX2UKGgGaAloD0MI/P7NixNf4b+UhpRSlGgVSzJoFkdAo+4CYmb9ZXV9lChoBmgJaA9DCHRDU3b6weu/lIaUUpRoFUsyaBZHQKPvvl7tzCF1fZQoaAZoCWgPQwhL6C6JsyLyv5SGlFKUaBVLMmgWR0Cj74NHxz7udX2UKGgGaAloD0MI/z9OmDAa5b+UhpRSlGgVSzJoFkdAo+9H0NBnjH
V9lChoBmgJaA9DCIgP7PgvkOG/lIaUUpRoFUsyaBZHQKPvDgQYk3V1fZQoaAZoCWgPQwgKStHKvYDwv5SGlFKUaBVLMmgWR0Cj8LRsdkrgdX2UKGgGaAloD0MIlQuVfy2v8b+UhpRSlGgVSzJoFkdAo/B4SteUp3V9lChoBmgJaA9DCGhYjLrW3tW/lIaUUpRoFUsyaBZHQKPwO0ALiMp1fZQoaAZoCWgPQwh/3H75ZMX2v5SGlFKUaBVLMmgWR0Cj8ADXFtKqdX2UKGgGaAloD0MI1SZO7nco47+UhpRSlGgVSzJoFkdAo/HD6k6903V9lChoBmgJaA9DCInsgywLJvW/lIaUUpRoFUsyaBZHQKPxh/vv0Ad1fZQoaAZoCWgPQwhCmNu93Cfpv5SGlFKUaBVLMmgWR0Cj8UxOUMXrdX2UKGgGaAloD0MIQ1VMpZ9w57+UhpRSlGgVSzJoFkdAo/ES2lVLjHV9lChoBmgJaA9DCNfZkH9m0PK/lIaUUpRoFUsyaBZHQKPyyBf8dgh1fZQoaAZoCWgPQwhcV8wIbw/Cv5SGlFKUaBVLMmgWR0Cj8oyP2f03dX2UKGgGaAloD0MIGLK61XNS6L+UhpRSlGgVSzJoFkdAo/JP2VVxTHV9lChoBmgJaA9DCF38bU+QWOm/lIaUUpRoFUsyaBZHQKPyFYr8R+V1fZQoaAZoCWgPQwjK/KNv0rTyv5SGlFKUaBVLMmgWR0Cj88p1q33IdX2UKGgGaAloD0MIBFlPrb669L+UhpRSlGgVSzJoFkdAo/OOXJHRTnV9lChoBmgJaA9DCF7acFgaeO6/lIaUUpRoFUsyaBZHQKPzUYb83uN1fZQoaAZoCWgPQwiVnBN7aB/jv5SGlFKUaBVLMmgWR0Cj8xcYyfthdX2UKGgGaAloD0MIZMqHoGr06L+UhpRSlGgVSzJoFkdAo/TDjebd8HV9lChoBmgJaA9DCLd++s+an+q/lIaUUpRoFUsyaBZHQKP0hzND+it1fZQoaAZoCWgPQwiSQINNnUfYv5SGlFKUaBVLMmgWR0Cj9EpiqhlEdX2UKGgGaAloD0MIeLgdGhZj8r+UhpRSlGgVSzJoFkdAo/QP0NBnjHV9lChoBmgJaA9DCF2lu+tsSOi/lIaUUpRoFUsyaBZHQKP1wIcinpB1fZQoaAZoCWgPQwhpNSTusfThv5SGlFKUaBVLMmgWR0Cj9YSimEXddX2UKGgGaAloD0MIBJDaxMm98b+UhpRSlGgVSzJoFkdAo/VIOx0MgHV9lChoBmgJaA9DCMpuZvSjYem/lIaUUpRoFUsyaBZHQKP1Dd/J/5N1fZQoaAZoCWgPQwihR4yeW+jXv5SGlFKUaBVLMmgWR0Cj9tDA8B+4dX2UKGgGaAloD0MIFR40u+6t3L+UhpRSlGgVSzJoFkdAo/aVX5nDi3V9lChoBmgJaA9DCB/3rdaJy+q/lIaUUpRoFUsyaBZHQKP2WRPoFFF1fZQoaAZoCWgPQwjbw14oYLvtv5SGlFKUaBVLMmgWR0Cj9h6kqMFVdX2UKGgGaAloD0MIxR7axwr+7L+UhpRSlGgVSzJoFkdAo/fXGff4y3V9lChoBmgJaA9DCK01lNqL6OW/lIaUUpRoFUsyaBZHQKP3mu0TlDF1fZQoaAZoCWgPQwhB8zl3u17Qv5SGlFKUaBVLMmgWR0Cj91420iQldX2UKGgGaAloD0MITuyhfazg67+UhpRSlGgVSzJoFkdAo/cj8YQ8OnV9lChoBmgJaA9DCGfROxVwz92/lIaUUpRoFUsyaBZHQKP46yKvV3F1fZQoaAZoCWgPQwhBR6ta0pHyv5SGlFKUaBVLMmgWR0Cj+K8SoOx0dX2UKGgGaAloD0MIrTWU2oto4b+UhpRSlGgVSzJoFkdAo/hyFRHf/HV9lChoBmgJaA9DCGcKndfYJdG/lIaUUpRoFUsyaBZHQKP4N7UG3Wp1ZS4="
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",
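The ":serialized:" entries in the data file are base64-encoded pickle payloads; the readable fields next to them are just previews of the decoded values. As a rough sketch, assuming numpy is installed and you trust the checkpoint, one such payload can be decoded like this:

    # Hedged sketch: decode one ":serialized:" field from the data file back into
    # a Python object. Only do this for checkpoints you trust -- pickle executes code.
    import base64, json, pickle

    with open("a2c-PandaReachDense-v2/data") as f:
        data = json.load(f)

    last_obs = pickle.loads(base64.b64decode(data["_last_obs"][":serialized:"]))
    print(type(last_obs), list(last_obs.keys()))  # OrderedDict: achieved_goal, desired_goal, observation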
a2c-PandaReachDense-v2/policy.optimizer.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:0939b48e291248b500c13f1c3f973f50296771b526ad86ae229cbe998f0608bc
 size 45438
a2c-PandaReachDense-v2/policy.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:eefd132a4b3fb88f774ebe7333b9943706c7b83a18295916ee174875d4d225d3
 size 46718
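policy.pth and policy.optimizer.pth hold the PyTorch state dicts for the policy network and its RMSprop optimizer. A small inspection sketch, assuming PyTorch is installed and the LFS objects have been pulled:

    # Hedged sketch: peek at the saved policy weights without rebuilding the model.
    import torch

    state_dict = torch.load("a2c-PandaReachDense-v2/policy.pth", map_location="cpu")
    for name, tensor in list(state_dict.items())[:5]:
        print(name, tuple(tensor.shape))  # e.g. features-extractor and mlp_extractor layers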
config.json CHANGED
@@ -1 +1 @@
-
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=", "__module__": "stable_baselines3.common.policies", "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7f8e4f3da160>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc_data object at 0x7f8e4f43dcc0>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVowAAAAAAAAB9lCiMDGxvZ19zdGRfaW5pdJRK/v///4wKb3J0aG9faW5pdJSJjA9vcHRpbWl6ZXJfY2xhc3OUjBN0b3JjaC5vcHRpbS5ybXNwcm9wlIwHUk1TcHJvcJSTlIwQb3B0aW1pemVyX2t3YXJnc5R9lCiMBWFscGhhlEc/764UeuFHrowDZXBzlEc+5Pi1iONo8YwMd2VpZ2h0X2RlY2F5lEsAdXUu", "log_std_init": -2, "ortho_init": false, "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "observation_space": {":type:": "<class 'gym.spaces.dict.Dict'>", ":serialized:": 
"gAWVUgMAAAAAAACMD2d5bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwOZ3ltLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUaBCTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowGX3NoYXBllEsDhZSMA2xvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZSMAUOUdJRSlIwEaGlnaJRoHSiWDAAAAAAAAAAAACBBAAAgQQAAIEGUaBVLA4WUaCB0lFKUjA1ib3VuZGVkX2JlbG93lGgdKJYDAAAAAAAAAAEBAZRoEowCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZRoIHSUUpSMDWJvdW5kZWRfYWJvdmWUaB0olgMAAAAAAAAAAQEBlGgsSwOFlGggdJRSlIwKX25wX3JhbmRvbZROdWKMDGRlc2lyZWRfZ29hbJRoDSmBlH2UKGgQaBVoGEsDhZRoGmgdKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZRoIHSUUpRoI2gdKJYMAAAAAAAAAAAAIEEAACBBAAAgQZRoFUsDhZRoIHSUUpRoKGgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoMmgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoN051YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgVaBhLBoWUaBpoHSiWGAAAAAAAAAAAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBVLBoWUaCB0lFKUaCNoHSiWGAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEGUaBVLBoWUaCB0lFKUaChoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDJoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDdOdWJ1aBhOaBBOaDdOdWIu", "spaces": "OrderedDict([('achieved_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('desired_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('observation', Box([-10. -10. -10. -10. -10. -10.], [10. 10. 10. 10. 10. 10.], (6,), float32))])", "_shape": null, "dtype": null, "_np_random": null}, "action_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": "gAWVbQEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLA4WUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWDAAAAAAAAAAAAIC/AACAvwAAgL+UaApLA4WUjAFDlHSUUpSMBGhpZ2iUaBIolgwAAAAAAAAAAACAPwAAgD8AAIA/lGgKSwOFlGgVdJRSlIwNYm91bmRlZF9iZWxvd5RoEiiWAwAAAAAAAAABAQGUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLA4WUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYDAAAAAAAAAAEBAZRoIUsDhZRoFXSUUpSMCl9ucF9yYW5kb22UTnViLg==", "dtype": "float32", "_shape": [3], "low": "[-1. -1. -1.]", "high": "[1. 1. 
1.]", "bounded_below": "[ True True True]", "bounded_above": "[ True True True]", "_np_random": null}, "n_envs": 4, "num_timesteps": 1000000, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1674216357382478341, "learning_rate": 0.00096, "tensorboard_log": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOC9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOC9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/T3UQTVUdaYWUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="}, "_last_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAwpvfPtJOeTytmQk/wpvfPtJOeTytmQk/wpvfPtJOeTytmQk/wpvfPtJOeTytmQk/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAXBGsvh/k4L5E7Ji//VxZPyH1zL+7dRS/Tf6QP2VSjb+lg6O/gOTMv1fVyr8J2pK/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADCm98+0k55PK2ZCT+G0149naYDO5JiVT3Cm98+0k55PK2ZCT+G0149naYDO5JiVT3Cm98+0k55PK2ZCT+G0149naYDO5JiVT3Cm98+0k55PK2ZCT+G0149naYDO5JiVT2UaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[0.4367352 0.01521655 0.53750116]\n [0.4367352 0.01521655 0.53750116]\n [0.4367352 0.01521655 0.53750116]\n [0.4367352 0.01521655 0.53750116]]", "desired_goal": "[[-0.33606994 -0.43924043 -1.1947103 ]\n [ 0.84907514 -1.6012307 -0.5799214 ]\n [ 1.1327606 -1.104077 -1.277455 ]\n [-1.6007233 -1.5846356 -1.1472789 ]]", "observation": "[[0.4367352 0.01521655 0.53750116 0.05440094 0.00200883 0.05209596]\n [0.4367352 0.01521655 0.53750116 0.05440094 0.00200883 0.05209596]\n [0.4367352 0.01521655 0.53750116 0.05440094 0.00200883 0.05209596]\n [0.4367352 0.01521655 0.53750116 0.05440094 0.00200883 0.05209596]]"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAEBAQGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": 
"gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAAVdNPYvIKj0L83g+Sf3SPdtf6z0bJpY8kLvuPMt+8jwJCgE+H579PdcQgD0bZUA+lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]", "desired_goal": "[[0.0501318 0.04169516 0.24311464]\n [0.10302217 0.11492892 0.01832872]\n [0.02914217 0.02960148 0.12601484]\n [0.12383675 0.06253212 0.18788569]]", "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"}, "_episode_num": 0, "use_sde": true, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIIuF7f4P27L+UhpRSlIwBbJRLMowBdJRHQKTS13qRlpZ1fZQoaAZoCWgPQwgG2bJ8XQb7v5SGlFKUaBVLMmgWR0Ck0pwYLsrvdX2UKGgGaAloD0MIkGtDxTj/7r+UhpRSlGgVSzJoFkdApNJgazeGf3V9lChoBmgJaA9DCM9qgT0mEvq/lIaUUpRoFUsyaBZHQKTSJemelKt1fZQoaAZoCWgPQwia6V4n9SX3v5SGlFKUaBVLMmgWR0Ck0/L0z0pWdX2UKGgGaAloD0MI1PGYgcr477+UhpRSlGgVSzJoFkdApNO2+bmU4nV9lChoBmgJaA9DCD5A9+XM9vu/lIaUUpRoFUsyaBZHQKTTejh1klN1fZQoaAZoCWgPQwi/fLJiuLrwv5SGlFKUaBVLMmgWR0Ck0z7nX/YKdX2UKGgGaAloD0MIqaPjamQ3AMCUhpRSlGgVSzJoFkdApNT9LBbfQHV9lChoBmgJaA9DCKH18GWiiADAlIaUUpRoFUsyaBZHQKTUwSzw+dN1fZQoaAZoCWgPQwjSUQ5mE2Dxv5SGlFKUaBVLMmgWR0Ck1IQyZa3adX2UKGgGaAloD0MINbbXgt47A8CUhpRSlGgVSzJoFkdApNRI5o4+83V9lChoBmgJaA9DCMtHUtLD0AHAlIaUUpRoFUsyaBZHQKTWAZP2wmp1fZQoaAZoCWgPQwgBF2TL8nXtv5SGlFKUaBVLMmgWR0Ck1cWSlnAZdX2UKGgGaAloD0MIPX0E/vCz8b+UhpRSlGgVSzJoFkdApNWIwj+rEXV9lChoBmgJaA9DCLXdBN80vfu/lIaUUpRoFUsyaBZHQKTVTTKkl/p1fZQoaAZoCWgPQwhzEHS0qiUBwJSGlFKUaBVLMmgWR0Ck1wyG8EmqdX2UKGgGaAloD0MIr2Ab8WS3A8CUhpRSlGgVSzJoFkdApNbQwM6RyXV9lChoBmgJaA9DCMH/VrJjI/C/lIaUUpRoFUsyaBZHQKTWlAhStNl1fZQoaAZoCWgPQwhfuHNhpJfuv5SGlFKUaBVLMmgWR0Ck1lixu89PdX2UKGgGaAloD0MI6spneR4cBcCUhpRSlGgVSzJoFkdApNgY5eZ5RnV9lChoBmgJaA9DCAPOUrKcJAPAlIaUUpRoFUsyaBZHQKTX3RCQcPx1fZQoaAZoCWgPQwgoDwu1prn+v5SGlFKUaBVLMmgWR0Ck16B0IToMdX2UKGgGaAloD0MIEticg2fC67+UhpRSlGgVSzJoFkdApNdk/GEPD3V9lChoBmgJaA9DCIj2sYLfBvS/lIaUUpRoFUsyaBZHQKTZK3G4qgB1fZQoaAZoCWgPQwjYt5OI8G/wv5SGlFKUaBVLMmgWR0Ck2O98Z1mrdX2UKGgGaAloD0MI+gj84ee/8L+UhpRSlGgVSzJoFkdApNiyqp97W3V9lChoBmgJaA9DCJ0OZD21OgDAlIaUUpRoFUsyaBZHQKTYd3jdYXB1fZQoaAZoCWgPQwgYtJCA0SX9v5SGlFKUaBVLMmgWR0Ck2iyVObiIdX2UKGgGaAloD0MIX9ODglL0+7+UhpRSlGgVSzJoFkdApNnweJYT03V9lChoBmgJaA9DCA2nzM03IuW/lIaUUpRoFUsyaBZHQKTZs6QNkOJ1fZQoaAZoCWgPQwjh05y8yET6v5SGlFKUaBVLMmgWR0Ck2XhXjlxPdX2UKGgGaAloD0MI91llprR+6L+UhpRSlGgVSzJoFkdApNs668QI2XV9lChoBmgJaA9DCMe6uI0GsPe/lIaUUpRoFUsyaBZHQKTa/uO0b991fZQoaAZoCW
gPQwhA+FCiJc/0v5SGlFKUaBVLMmgWR0Ck2sISL61tdX2UKGgGaAloD0MIOsssQrFV+L+UhpRSlGgVSzJoFkdApNqGaa1CxHV9lChoBmgJaA9DCLXEymjk8/i/lIaUUpRoFUsyaBZHQKTcPNSqEOB1fZQoaAZoCWgPQwiGxhNBnMfwv5SGlFKUaBVLMmgWR0Ck3ACmMwUQdX2UKGgGaAloD0MIDJQUWABT8b+UhpRSlGgVSzJoFkdApNvD/82rGXV9lChoBmgJaA9DCMnH7gIlhfO/lIaUUpRoFUsyaBZHQKTbiJlar3l1fZQoaAZoCWgPQwhy+Q/pty/0v5SGlFKUaBVLMmgWR0Ck3VOFQEZBdX2UKGgGaAloD0MIldV0PdF1/L+UhpRSlGgVSzJoFkdApN0XmYBvJnV9lChoBmgJaA9DCOFGyhZJu+6/lIaUUpRoFUsyaBZHQKTc2tnwob51fZQoaAZoCWgPQwgDJnDrbp7kv5SGlFKUaBVLMmgWR0Ck3J90Rvm6dX2UKGgGaAloD0MIGa95VWc18b+UhpRSlGgVSzJoFkdApN5Zv3rUsnV9lChoBmgJaA9DCCwtI/Weyuy/lIaUUpRoFUsyaBZHQKTeHbmEGqx1fZQoaAZoCWgPQwgn2epySsDsv5SGlFKUaBVLMmgWR0Ck3eDnNgSfdX2UKGgGaAloD0MI/7J78rDQ77+UhpRSlGgVSzJoFkdApN2lUfgaWHV9lChoBmgJaA9DCOlkqfV+o+O/lIaUUpRoFUsyaBZHQKTfYyvcJt11fZQoaAZoCWgPQwjtmSUBamrvv5SGlFKUaBVLMmgWR0Ck3ycYAKfGdX2UKGgGaAloD0MIehwG81cI9r+UhpRSlGgVSzJoFkdApN7qHGjsU3V9lChoBmgJaA9DCGAEjZlEvdy/lIaUUpRoFUsyaBZHQKTerqptJnR1fZQoaAZoCWgPQwim1ZC4x1Lqv5SGlFKUaBVLMmgWR0Ck4G4C6pYLdX2UKGgGaAloD0MI7mDEPgEU3r+UhpRSlGgVSzJoFkdApOAyDTSb6XV9lChoBmgJaA9DCN2U8loJ3fe/lIaUUpRoFUsyaBZHQKTf9TUiILx1fZQoaAZoCWgPQwgVi98UVirnv5SGlFKUaBVLMmgWR0Ck37nhjvuxdX2UKGgGaAloD0MIPGh23VsxAcCUhpRSlGgVSzJoFkdApOF8PSUkfXV9lChoBmgJaA9DCFcKgVziSOC/lIaUUpRoFUsyaBZHQKThQFbFCLN1fZQoaAZoCWgPQwgonN1aJgPwv5SGlFKUaBVLMmgWR0Ck4QORcNYsdX2UKGgGaAloD0MIeXO4VnvY5r+UhpRSlGgVSzJoFkdApODIDTz/ZXV9lChoBmgJaA9DCCJTPgRVo+u/lIaUUpRoFUsyaBZHQKTihXnQpnZ1fZQoaAZoCWgPQwgWvr7WpUbtv5SGlFKUaBVLMmgWR0Ck4klqrR0EdX2UKGgGaAloD0MIp88OuK4YA8CUhpRSlGgVSzJoFkdApOIMqDsdDXV9lChoBmgJaA9DCEseT8sP3O+/lIaUUpRoFUsyaBZHQKTh0SxqwhZ1fZQoaAZoCWgPQwh6GFqdnGHzv5SGlFKUaBVLMmgWR0Ck45i+tbLVdX2UKGgGaAloD0MIkGgCRSzi8L+UhpRSlGgVSzJoFkdApONcvK2a2HV9lChoBmgJaA9DCNJyoIfatve/lIaUUpRoFUsyaBZHQKTjIAQQL/l1fZQoaAZoCWgPQwgzU1p/SwD8v5SGlFKUaBVLMmgWR0Ck4uSo4uK5dX2UKGgGaAloD0MIwW9DjNd897+UhpRSlGgVSzJoFkdApOSq4SYgJXV9lChoBmgJaA9DCLSTwVHy6ta/lIaUUpRoFUsyaBZHQKTkbuivgWJ1fZQoaAZoCWgPQwgj93R1x+L4v5SGlFKUaBVLMmgWR0Ck5DIWYWtVdX2UKGgGaAloD0MIAeDYs+ey67+UhpRSlGgVSzJoFkdApOP2plz2e3V9lChoBmgJaA9DCPRTHAdeLfa/lIaUUpRoFUsyaBZHQKTlvI5o4+91fZQoaAZoCWgPQwj7rDJTWn/lv5SGlFKUaBVLMmgWR0Ck5YCPhhphdX2UKGgGaAloD0MIKsQj8fI08L+UhpRSlGgVSzJoFkdApOVD4zrNW3V9lChoBmgJaA9DCJHRAUnYt9i/lIaUUpRoFUsyaBZHQKTlCG0u14R1fZQoaAZoCWgPQwhSZK2h1N7jv5SGlFKUaBVLMmgWR0Ck5ssbFS88dX2UKGgGaAloD0MIKA8LtaZ5+r+UhpRSlGgVSzJoFkdApOaPGdZq23V9lChoBmgJaA9DCAw/OJ861vC/lIaUUpRoFUsyaBZHQKTmUkpI+W51fZQoaAZoCWgPQwih8xq7RDXxv5SGlFKUaBVLMmgWR0Ck5hb2lEZ0dX2UKGgGaAloD0MIqmVrfZFQ8b+UhpRSlGgVSzJoFkdApOfZNEgGKXV9lChoBmgJaA9DCJvHYTB/xfG/lIaUUpRoFUsyaBZHQKTnnRIBikR1fZQoaAZoCWgPQwiqYb8n1qnpv5SGlFKUaBVLMmgWR0Ck52BESdvsdX2UKGgGaAloD0MIY5y/CYXI8b+UhpRSlGgVSzJoFkdApOck/jbSJHV9lChoBmgJaA9DCAcI5ujx+/W/lIaUUpRoFUsyaBZHQKTo67/4qPR1fZQoaAZoCWgPQwi4PNaMDPLqv5SGlFKUaBVLMmgWR0Ck6K/b9If9dX2UKGgGaAloD0MIMewwJv297L+UhpRSlGgVSzJoFkdApOhzMTviLnV9lChoBmgJaA9DCFOT4A1pVO+/lIaUUpRoFUsyaBZHQKToN9a2Wpt1fZQoaAZoCWgPQwgk1Xd+UQLnv5SGlFKUaBVLMmgWR0Ck6fUEHMUzdX2UKGgGaAloD0MI2v8Aa9Uu6b+UhpRSlGgVSzJoFkdApOm48SwnpnV9lChoBmgJaA9DCPZ8zXLZaOq/lIaUUpRoFUsyaBZHQKTpfC2MKkV1fZQoaAZoCWgPQwiBd/Lpsa3jv5SGlFKUaBVLMmgWR0Ck6UC6QNkOdX2UKGgGaAloD0MIcCNli6Rd67+UhpRSlGgVSzJoFkdApOsLspoboHV9lChoBmgJaA9DCCV1ApoIm+S/lIaUUpRoFUsyaBZHQKTqz8xbjcV1fZQoaAZoCWgPQwjCwkmaPybxv5SGlFKUaBVLMmgWR0Ck6pMW43FUdX2UKGgGaAloD0MIUUoIVtXL47+UhpRSlGgVSzJoFkdApOpXdqL0jHV9lChoBmgJaA9DCDawVYLF4dq/lIaUUpRoFUsyaBZHQKTsEuGsV+J1fZQoaAZoCWgPQwhfKGA7GLHZv5SGlFKUaBVLMmgWR0Ck69br9l3AdX2UKGgGaAloD0MIilWDMLf77L+UhpRSlGgVSzJoFkdApOuaAavRq3V9lChoBmgJaA9DCKgY529CIea/lIaUUpRoFUsyaBZHQKTrXmfXf651ZS4="}, "ep_success_buffer": {":type:": 
"<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 31250, "n_steps": 8, "gamma": 0.99, "gae_lambda": 0.9, "ent_coef": 0.0, "vf_coef": 0.4, "max_grad_norm": 0.5, "normalize_advantage": false, "system_info": {"OS": "Linux-5.10.147+-x86_64-with-glibc2.29 # 1 SMP Sat Dec 10 16:00:40 UTC 2022", "Python": "3.8.10", "Stable-Baselines3": "1.7.0", "PyTorch": "1.13.1+cu116", "GPU Enabled": "True", "Numpy": "1.21.6", "Gym": "0.21.0"}}
+
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=", "__module__": "stable_baselines3.common.policies", "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7fc0d02945e0>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc_data object at 0x7fc0d028cae0>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVowAAAAAAAAB9lCiMDGxvZ19zdGRfaW5pdJRK/v///4wKb3J0aG9faW5pdJSJjA9vcHRpbWl6ZXJfY2xhc3OUjBN0b3JjaC5vcHRpbS5ybXNwcm9wlIwHUk1TcHJvcJSTlIwQb3B0aW1pemVyX2t3YXJnc5R9lCiMBWFscGhhlEc/764UeuFHrowDZXBzlEc+5Pi1iONo8YwMd2VpZ2h0X2RlY2F5lEsAdXUu", "log_std_init": -2, "ortho_init": false, "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "observation_space": {":type:": "<class 'gym.spaces.dict.Dict'>", ":serialized:": 
"gAWVUgMAAAAAAACMD2d5bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwOZ3ltLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUaBCTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowGX3NoYXBllEsDhZSMA2xvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZSMAUOUdJRSlIwEaGlnaJRoHSiWDAAAAAAAAAAAACBBAAAgQQAAIEGUaBVLA4WUaCB0lFKUjA1ib3VuZGVkX2JlbG93lGgdKJYDAAAAAAAAAAEBAZRoEowCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZRoIHSUUpSMDWJvdW5kZWRfYWJvdmWUaB0olgMAAAAAAAAAAQEBlGgsSwOFlGggdJRSlIwKX25wX3JhbmRvbZROdWKMDGRlc2lyZWRfZ29hbJRoDSmBlH2UKGgQaBVoGEsDhZRoGmgdKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZRoIHSUUpRoI2gdKJYMAAAAAAAAAAAAIEEAACBBAAAgQZRoFUsDhZRoIHSUUpRoKGgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoMmgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoN051YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgVaBhLBoWUaBpoHSiWGAAAAAAAAAAAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBVLBoWUaCB0lFKUaCNoHSiWGAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEGUaBVLBoWUaCB0lFKUaChoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDJoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDdOdWJ1aBhOaBBOaDdOdWIu", "spaces": "OrderedDict([('achieved_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('desired_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('observation', Box([-10. -10. -10. -10. -10. -10.], [10. 10. 10. 10. 10. 10.], (6,), float32))])", "_shape": null, "dtype": null, "_np_random": null}, "action_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": "gAWVbQEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLA4WUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWDAAAAAAAAAAAAIC/AACAvwAAgL+UaApLA4WUjAFDlHSUUpSMBGhpZ2iUaBIolgwAAAAAAAAAAACAPwAAgD8AAIA/lGgKSwOFlGgVdJRSlIwNYm91bmRlZF9iZWxvd5RoEiiWAwAAAAAAAAABAQGUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLA4WUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYDAAAAAAAAAAEBAZRoIUsDhZRoFXSUUpSMCl9ucF9yYW5kb22UTnViLg==", "dtype": "float32", "_shape": [3], "low": "[-1. -1. -1.]", "high": "[1. 1. 
1.]", "bounded_below": "[ True True True]", "bounded_above": "[ True True True]", "_np_random": null}, "n_envs": 4, "num_timesteps": 1000000, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1674225358461228826, "learning_rate": 0.00096, "tensorboard_log": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOC9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOC9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/T3UQTVUdaYWUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="}, "_last_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAlXCwPvRTwjz4xhw/lXCwPvRTwjz4xhw/lXCwPvRTwjz4xhw/lXCwPvRTwjz4xhw/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAnyYWP7Apu7+xpEW/1aA1P5W/Ub81LeM+c33APQ4CHL9EVcW/4+7aP/SDNz6FWnE/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAACVcLA+9FPCPPjGHD9NCWw9u8jSOvVEhD2VcLA+9FPCPPjGHD9NCWw9u8jSOvVEhD2VcLA+9FPCPPjGHD9NCWw9u8jSOvVEhD2VcLA+9FPCPPjGHD9NCWw9u8jSOvVEhD2UaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[0.34460893 0.02372167 0.612411 ]\n [0.34460893 0.02372167 0.612411 ]\n [0.34460893 0.02372167 0.612411 ]\n [0.34460893 0.02372167 0.612411 ]]", "desired_goal": "[[ 0.5865268 -1.4622097 -0.77204424]\n [ 0.70948535 -0.81932956 0.44370428]\n [ 0.09398928 -0.60940635 -1.5416646 ]\n [ 1.7104152 0.1792143 0.94278747]]", "observation": "[[0.34460893 0.02372167 0.612411 0.05762606 0.00160816 0.06458465]\n [0.34460893 0.02372167 0.612411 0.05762606 0.00160816 0.06458465]\n [0.34460893 0.02372167 0.612411 0.05762606 0.00160816 0.06458465]\n [0.34460893 0.02372167 0.612411 0.05762606 0.00160816 0.06458465]]"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAEBAQGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": 
"gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAUOvZu8JVhLxRwks+n1BEvIQ1ED5biSE+Y1eOvarmpz0TkiI+9T6DvdMGYL0rGqQ9lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]", "desired_goal": "[[-0.00665037 -0.01615417 0.19898345]\n [-0.01198211 0.14082915 0.15775053]\n [-0.06950261 0.08198293 0.15876035]\n [-0.06408492 -0.05469401 0.08012804]]", "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"}, "_episode_num": 0, "use_sde": true, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIYmngRzVs4r+UhpRSlIwBbJRLMowBdJRHQKPdACih37l1fZQoaAZoCWgPQwiygXSxaaXav5SGlFKUaBVLMmgWR0Cj3MPcafjCdX2UKGgGaAloD0MIrrzkf/J347+UhpRSlGgVSzJoFkdAo9yG3MINVnV9lChoBmgJaA9DCEEQIEPHDtC/lIaUUpRoFUsyaBZHQKPcTGEPDpF1fZQoaAZoCWgPQwi1+uqqQC3nv5SGlFKUaBVLMmgWR0Cj3nJZwGW2dX2UKGgGaAloD0MI5h99k6bB57+UhpRSlGgVSzJoFkdAo943HBDXv3V9lChoBmgJaA9DCEuxo3GoX+O/lIaUUpRoFUsyaBZHQKPd+zQ/oq11fZQoaAZoCWgPQwid19glqrfMv5SGlFKUaBVLMmgWR0Cj3cG8VYZEdX2UKGgGaAloD0MIuyU5YFeT5b+UhpRSlGgVSzJoFkdAo+ANIZqEe3V9lChoBmgJaA9DCAosgCkDB9a/lIaUUpRoFUsyaBZHQKPf0c3l0YF1fZQoaAZoCWgPQwhAa378pUXjv5SGlFKUaBVLMmgWR0Cj35XPZ7HAdX2UKGgGaAloD0MILudSXFV257+UhpRSlGgVSzJoFkdAo99b39JjD3V9lChoBmgJaA9DCBmuDoC4K+W/lIaUUpRoFUsyaBZHQKPhvWqcVgx1fZQoaAZoCWgPQwiBCkeQSjHnv5SGlFKUaBVLMmgWR0Cj4YKTSsr/dX2UKGgGaAloD0MIKgMHtHQF2b+UhpRSlGgVSzJoFkdAo+FGcc2itnV9lChoBmgJaA9DCDbNO07REeO/lIaUUpRoFUsyaBZHQKPhDOIqLCN1fZQoaAZoCWgPQwgD6zh+qDTfv5SGlFKUaBVLMmgWR0Cj41tW2gFpdX2UKGgGaAloD0MIhSLdzylI47+UhpRSlGgVSzJoFkdAo+Mf13+uNnV9lChoBmgJaA9DCMql8QuvZPG/lIaUUpRoFUsyaBZHQKPi44oZydZ1fZQoaAZoCWgPQwjecYqO5HLjv5SGlFKUaBVLMmgWR0Cj4qnhjvuxdX2UKGgGaAloD0MIQWfSpuoe2b+UhpRSlGgVSzJoFkdAo+UcophF3XV9lChoBmgJaA9DCIem7PSDOuy/lIaUUpRoFUsyaBZHQKPk4Wa+evp1fZQoaAZoCWgPQwi3YRQEjy/1v5SGlFKUaBVLMmgWR0Cj5KWPtD2KdX2UKGgGaAloD0MI+BvtuOF337+UhpRSlGgVSzJoFkdAo+RsGA08/3V9lChoBmgJaA9DCJ4JTRJLyue/lIaUUpRoFUsyaBZHQKPmzZq20At1fZQoaAZoCWgPQwhZiuQrgZTrv5SGlFKUaBVLMmgWR0Cj5pGkvboKdX2UKGgGaAloD0MIqg1ORL825L+UhpRSlGgVSzJoFkdAo+ZU3l0YCXV9lChoBmgJaA9DCC7IluXrMuK/lIaUUpRoFUsyaBZHQKPmGnFYMfB1fZQoaAZoCWgPQwhgrdo1Ia3Wv5SGlFKUaBVLMmgWR0Cj59UAtFrmdX2UKGgGaAloD0MI4uR+h6JA4b+UhpRSlGgVSzJoFkdAo+eY4wRGt3V9lChoBmgJaA9DCPILryR5LuC/lIaUUpRoFUsyaBZHQKPnXC79Q411fZQoaAZoCWgPQwjiVkEMdO3av5SGlFKUaBVLMmgWR0Cj5yG2sq8UdX2UKGgGaAloD0MI1QloImz47b+UhpRSlGgVSzJoFkdAo+jIMrmQsHV9lChoBmgJaA9DCMX+snvysOG/lIaUUpRoFUsyaBZHQKPoi9aEBbR1fZQ
oaAZoCWgPQwh24JwRpb3Pv5SGlFKUaBVLMmgWR0Cj6E7+T/yYdX2UKGgGaAloD0MIyzDuBtFa2L+UhpRSlGgVSzJoFkdAo+gUlzEJjXV9lChoBmgJaA9DCJ0QOugSDuK/lIaUUpRoFUsyaBZHQKPpxnaFmFt1fZQoaAZoCWgPQwiFtMagE0Lfv5SGlFKUaBVLMmgWR0Cj6YpOvdM1dX2UKGgGaAloD0MIMpI9Qs2Q17+UhpRSlGgVSzJoFkdAo+lNe4TbnHV9lChoBmgJaA9DCC1dwTbiyd+/lIaUUpRoFUsyaBZHQKPpEyoGY8d1fZQoaAZoCWgPQwh8tDhjmBPtv5SGlFKUaBVLMmgWR0Cj6r5e7cwhdX2UKGgGaAloD0MIIo0KnGyD67+UhpRSlGgVSzJoFkdAo+qCQaJhv3V9lChoBmgJaA9DCLjoZKn1fuK/lIaUUpRoFUsyaBZHQKPqRVp9JBh1fZQoaAZoCWgPQwh8DcFxGbfqv5SGlFKUaBVLMmgWR0Cj6grVe8f3dX2UKGgGaAloD0MIWWyTisba6L+UhpRSlGgVSzJoFkdAo+vEJng5znV9lChoBmgJaA9DCNAJoYMu4ei/lIaUUpRoFUsyaBZHQKPriB3A2yd1fZQoaAZoCWgPQwgi/mFLj6bsv5SGlFKUaBVLMmgWR0Cj60twJgLJdX2UKGgGaAloD0MIpKZdTDPd4r+UhpRSlGgVSzJoFkdAo+sRFI/Z/XV9lChoBmgJaA9DCH1AoDNpU9a/lIaUUpRoFUsyaBZHQKPsvigkC3h1fZQoaAZoCWgPQwg7j4r/OyLiv5SGlFKUaBVLMmgWR0Cj7IH6/IsAdX2UKGgGaAloD0MIkrJF0m7067+UhpRSlGgVSzJoFkdAo+xFDv3JxXV9lChoBmgJaA9DCNi4/l2fOdq/lIaUUpRoFUsyaBZHQKPsCq7ROUN1fZQoaAZoCWgPQwhnmNpSB/njv5SGlFKUaBVLMmgWR0Cj7bV9F4LUdX2UKGgGaAloD0MIr7DgfsAD4L+UhpRSlGgVSzJoFkdAo+16NMoMKHV9lChoBmgJaA9DCFt6NNWT+d6/lIaUUpRoFUsyaBZHQKPtPlnyup11fZQoaAZoCWgPQwg4LuOmBhrlv5SGlFKUaBVLMmgWR0Cj7QTodMkAdX2UKGgGaAloD0MI0jk/xXFg7b+UhpRSlGgVSzJoFkdAo+61vAGjbnV9lChoBmgJaA9DCDfdskP8A/C/lIaUUpRoFUsyaBZHQKPueaya/h51fZQoaAZoCWgPQwhxkuaPaW3jv5SGlFKUaBVLMmgWR0Cj7jy+6Ae8dX2UKGgGaAloD0MI/P7NixNf4b+UhpRSlGgVSzJoFkdAo+4CYmb9ZXV9lChoBmgJaA9DCHRDU3b6weu/lIaUUpRoFUsyaBZHQKPvvl7tzCF1fZQoaAZoCWgPQwhL6C6JsyLyv5SGlFKUaBVLMmgWR0Cj74NHxz7udX2UKGgGaAloD0MI/z9OmDAa5b+UhpRSlGgVSzJoFkdAo+9H0NBnjHV9lChoBmgJaA9DCIgP7PgvkOG/lIaUUpRoFUsyaBZHQKPvDgQYk3V1fZQoaAZoCWgPQwgKStHKvYDwv5SGlFKUaBVLMmgWR0Cj8LRsdkrgdX2UKGgGaAloD0MIlQuVfy2v8b+UhpRSlGgVSzJoFkdAo/B4SteUp3V9lChoBmgJaA9DCGhYjLrW3tW/lIaUUpRoFUsyaBZHQKPwO0ALiMp1fZQoaAZoCWgPQwh/3H75ZMX2v5SGlFKUaBVLMmgWR0Cj8ADXFtKqdX2UKGgGaAloD0MI1SZO7nco47+UhpRSlGgVSzJoFkdAo/HD6k6903V9lChoBmgJaA9DCInsgywLJvW/lIaUUpRoFUsyaBZHQKPxh/vv0Ad1fZQoaAZoCWgPQwhCmNu93Cfpv5SGlFKUaBVLMmgWR0Cj8UxOUMXrdX2UKGgGaAloD0MIQ1VMpZ9w57+UhpRSlGgVSzJoFkdAo/ES2lVLjHV9lChoBmgJaA9DCNfZkH9m0PK/lIaUUpRoFUsyaBZHQKPyyBf8dgh1fZQoaAZoCWgPQwhcV8wIbw/Cv5SGlFKUaBVLMmgWR0Cj8oyP2f03dX2UKGgGaAloD0MIGLK61XNS6L+UhpRSlGgVSzJoFkdAo/JP2VVxTHV9lChoBmgJaA9DCF38bU+QWOm/lIaUUpRoFUsyaBZHQKPyFYr8R+V1fZQoaAZoCWgPQwjK/KNv0rTyv5SGlFKUaBVLMmgWR0Cj88p1q33IdX2UKGgGaAloD0MIBFlPrb669L+UhpRSlGgVSzJoFkdAo/OOXJHRTnV9lChoBmgJaA9DCF7acFgaeO6/lIaUUpRoFUsyaBZHQKPzUYb83uN1fZQoaAZoCWgPQwiVnBN7aB/jv5SGlFKUaBVLMmgWR0Cj8xcYyfthdX2UKGgGaAloD0MIZMqHoGr06L+UhpRSlGgVSzJoFkdAo/TDjebd8HV9lChoBmgJaA9DCLd++s+an+q/lIaUUpRoFUsyaBZHQKP0hzND+it1fZQoaAZoCWgPQwiSQINNnUfYv5SGlFKUaBVLMmgWR0Cj9EpiqhlEdX2UKGgGaAloD0MIeLgdGhZj8r+UhpRSlGgVSzJoFkdAo/QP0NBnjHV9lChoBmgJaA9DCF2lu+tsSOi/lIaUUpRoFUsyaBZHQKP1wIcinpB1fZQoaAZoCWgPQwhpNSTusfThv5SGlFKUaBVLMmgWR0Cj9YSimEXddX2UKGgGaAloD0MIBJDaxMm98b+UhpRSlGgVSzJoFkdAo/VIOx0MgHV9lChoBmgJaA9DCMpuZvSjYem/lIaUUpRoFUsyaBZHQKP1Dd/J/5N1fZQoaAZoCWgPQwihR4yeW+jXv5SGlFKUaBVLMmgWR0Cj9tDA8B+4dX2UKGgGaAloD0MIFR40u+6t3L+UhpRSlGgVSzJoFkdAo/aVX5nDi3V9lChoBmgJaA9DCB/3rdaJy+q/lIaUUpRoFUsyaBZHQKP2WRPoFFF1fZQoaAZoCWgPQwjbw14oYLvtv5SGlFKUaBVLMmgWR0Cj9h6kqMFVdX2UKGgGaAloD0MIxR7axwr+7L+UhpRSlGgVSzJoFkdAo/fXGff4y3V9lChoBmgJaA9DCK01lNqL6OW/lIaUUpRoFUsyaBZHQKP3mu0TlDF1fZQoaAZoCWgPQwhB8zl3u17Qv5SGlFKUaBVLMmgWR0Cj91420iQldX2UKGgGaAloD0MITuyhfazg67+UhpRSlGgVSzJoFkdAo/cj8YQ8OnV9lChoBmgJaA9DCGfROxVwz92/lIaUUpRoFUsyaBZHQKP46yKvV3F1fZQoaAZoCWgPQwhBR6ta0pHyv5SGlFKUaBVLMmgWR0Cj+K8SoOx0dX2UKGgGaAloD0MIrTWU2oto4b+UhpRSlGgVSzJoFkdAo/hyFRHf/HV9lChoBmgJaA9DCGcKndfYJdG/lIaUUpRoFUsyaBZHQKP4N7UG3Wp1ZS4="}, "ep_success_buffer": 
{":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 31250, "n_steps": 8, "gamma": 0.99, "gae_lambda": 0.9, "ent_coef": 0.0, "vf_coef": 0.4, "max_grad_norm": 0.5, "normalize_advantage": false, "system_info": {"OS": "Linux-5.10.147+-x86_64-with-glibc2.29 # 1 SMP Sat Dec 10 16:00:40 UTC 2022", "Python": "3.8.10", "Stable-Baselines3": "1.7.0", "PyTorch": "1.13.1+cu116", "GPU Enabled": "True", "Numpy": "1.21.6", "Gym": "0.21.0"}}
replay.mp4 CHANGED
Binary files a/replay.mp4 and b/replay.mp4 differ
results.json CHANGED
@@ -1 +1 @@
-{"mean_reward": -0.
+{"mean_reward": -0.47196308803977444, "std_reward": 0.17735820248907164, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2023-01-20T15:19:20.779378"}
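results.json carries the evaluation that also feeds the README metric (-0.47 +/- 0.18). A sketch of how such numbers are typically produced with SB3's evaluate_policy, assuming the checkpoint and vec_normalize.pkl from this commit are available locally:

    # Hedged sketch: deterministic evaluation over 10 episodes, mirroring the fields
    # in results.json ("is_deterministic": true, "n_eval_episodes": 10).
    import panda_gym  # noqa: F401
    from stable_baselines3 import A2C
    from stable_baselines3.common.env_util import make_vec_env
    from stable_baselines3.common.evaluation import evaluate_policy
    from stable_baselines3.common.vec_env import VecNormalize

    eval_env = VecNormalize.load("vec_normalize.pkl", make_vec_env("PandaReachDense-v2", n_envs=1))
    eval_env.training = False      # freeze the running observation statistics
    eval_env.norm_reward = False   # report raw (unnormalized) rewards

    model = A2C.load("a2c-PandaReachDense-v2.zip")
    mean_reward, std_reward = evaluate_policy(model, eval_env, n_eval_episodes=10, deterministic=True)
    print(f"mean_reward={mean_reward:.2f} +/- {std_reward:.2f}")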
vec_normalize.pkl CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:f2cb1fff9283427d0e1f594146ea07c1676dd2b53b8bb1b148b6b5604b98e4d3
 size 3056