ppo model for lunar_lander_v2 pushed

Changed files:
- README.md +1 -1
- config.json +1 -1
- lander_model_ppo.zip +2 -2
- lander_model_ppo/data +18 -18
- lander_model_ppo/policy.optimizer.pth +1 -1
- lander_model_ppo/policy.pth +1 -1
- replay.mp4 +0 -0
- results.json +1 -1
README.md
CHANGED
@@ -16,7 +16,7 @@ model-index:
       type: LunarLander-v2
     metrics:
     - type: mean_reward
-      value:
+      value: 270.70 +/- 17.69
       name: mean_reward
       verified: false
 ---
config.json
CHANGED
@@ -1 +1 @@
-
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==", "__module__": "stable_baselines3.common.policies", "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function ActorCriticPolicy.__init__ at 0x79204a330940>", "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x79204a3309d0>", "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x79204a330a60>", "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x79204a330af0>", "_build": "<function ActorCriticPolicy._build at 0x79204a330b80>", "forward": "<function ActorCriticPolicy.forward at 0x79204a330c10>", "extract_features": "<function ActorCriticPolicy.extract_features at 0x79204a330ca0>", "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x79204a330d30>", "_predict": "<function ActorCriticPolicy._predict at 0x79204a330dc0>", "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x79204a330e50>", "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x79204a330ee0>", "predict_values": "<function ActorCriticPolicy.predict_values at 0x79204a330f70>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x79204a4d7c80>"}, "verbose": 1, "policy_kwargs": {}, "num_timesteps": 1015808, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1696257132723951797, "learning_rate": 0.0003, "tensorboard_log": null, "_last_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": 
"gAWVdQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYAAgAAAAAAADpjQj6Us6C8Ira5O9+tL7qeTw2+qIcEuwAAgD8AAIA/sxo0Pswepz5bQZE8be7EvohsXT3eeCm9AAAAAAAAAABGFz8+WzyZvMtwdbs5qMA5XAAMvk4apToAAIA/AACAPw3R271MKXY/oWarvmQ9R7/iefe9dzkavQAAAAAAAAAAs5MxPkHdg7zKI7G6e1T5ONOg6b06deg5AAAAAAAAgD+oIa2+CHOtPk7UOT59Wgy/EBvUvsu0PT4AAAAAAAAAAIAKq73XFBy7nmB/PEa6aDwejUS8PCNIPQAAAAAAAIA/JqPIPcP1Lbjt94kyR69rrrAoA7ytmwCzAAAAAAAAgD8NLgo+H/+1uzHvBT2lYFC7+P8EvX71MLwAAIA/AACAP0ZuJb5WOXI/Gtfivg4KP7+aN2u+S/T6vQAAAAAAAAAAhoegPl+koTxt7Hg8rYOfPFQCWT672XK9AACAPwAAgD/zeDY+cUNpP8emsT6EAkC/5uofPgzmxLwAAAAAAAAAAK0vdT43ZCQ/YuaSPiPdLL/PqJA+0X4VuwAAAAAAAAAAmqyePJUSWD9tA+s8Fkc/v9J1FD2N2m08AAAAAAAAAADNBpE8+5x3P6pQIT3vUzu/LuA9Pfz4QrwAAAAAAAAAAPMu472uKaW6EVRJPZd2EzU/sZ45U20NNAAAgD8AAAAAlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksQSwiGlIwBQ5R0lFKULg=="}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVgwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSxCFlIwBQ5R0lFKULg=="}, "_last_original_obs": null, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": -0.015808000000000044, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWV+wsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQHATK+FlCkaMAWyUS7SMAXSUR0CZ/ltVJcxCdX2UKGgGR0Bvbi68QI2PaAdLyGgIR0CaAC1rIo3KdX2UKGgGR0BwuCcEvCdjaAdLqGgIR0CaALGlANXpdX2UKGgGR0Bxl6GZeAuqaAdLsmgIR0CaAgZPl+3IdX2UKGgGR0BkBKeGwiaBaAdN6ANoCEdAmgM6jvd/KHV9lChoBkdAZO2Lb5/LDGgHTegDaAhHQJoDXbJwKjV1fZQoaAZHQHKDuueSSvFoB0vTaAhHQJoEIIkZ75V1fZQoaAZHQHDLJwjt5UtoB0vSaAhHQJoFAJPZZjh1fZQoaAZHQHJv03XI2floB00YAWgIR0CaBZlFMIu5dX2UKGgGR0BwEzrqt5lfaAdLyGgIR0CaBbXC0ngHdX2UKGgGR0Bd1duLrHENaAdN6ANoCEdAmgbWcriEQHV9lChoBkdAcHqdFvybx2gHS7doCEdAmgdDcynDSHV9lChoBkdAcVBhMajveGgHS+toCEdAmgiP9kz413V9lChoBkdAb5wcU/OdG2gHS8FoCEdAmgjPhqCYkXV9lChoBkdAcCFFc6eXiWgHS+JoCEdAmgshshxHXnV9lChoBkdAckcUS7GvOmgHS/ZoCEdAmgvwBDG96HV9lChoBkdAb6OFMZgogGgHTegCaAhHQJoL6S9ugpV1fZQoaAZHQGX02szVMEloB03oA2gIR0CaDMKjBVMmdX2UKGgGR0Bwfaw+t8u0aAdL4GgIR0CaDS0iyIHkdX2UKGgGR0Bauttygf2caAdN6ANoCEdAmg3I24uscXV9lChoBkdAciNpljEvTWgHS9xoCEdAmg3gMUh3aHV9lChoBkdAcR+7O3UhFGgHS79oCEdAmg6Say8jA3V9lChoBkdAcFTrksBhhGgHS7VoCEdAmg+m21D0DnV9lChoBkdAcKRS9ugpSmgHS65oCEdAmg+nmFJxvXV9lChoBkdAcSicawUxmGgHTUMBaAhHQJoR9Q1rIo51fZQoaAZHQHGfsbWEsatoB00gAWgIR0CaEgWFvhqCdX2UKGgGR0BwLqHYYixFaAdLxmgIR0CaFOWlMyrQdX2UKGgGR0BvnlXcQAdXaAdL3WgIR0CaFSjMV1wHdX2UKGgGR0BwGusxO+IuaAdL0GgIR0CaFdAOrhitdX2UKGgGR0BwP5AGB4D+aAdLyWgIR0CaFi9XcQAddX2UKGgGR0BvTgRsdkrgaAdLw2gIR0CaFtKaG5+ZdX2UKGgGR0BxoytT1kDqaAdL32gIR0CaF0aBI4EPdX2UKGgGR0ByxkFjd56daAdL6GgIR0CaGcrXlKbsdX2UKGgGR0BwVebAk9lmaAdLxmgIR0CaGuGzru6VdX2UKGgGR0BxAiLxZuAJaAdLyWgIR0CaGvWkrPMTdX2UKGgGR0BgsunsLORlaAdN6ANoCEdAmh8ckMTewnV9lChoBkdAcz7Nvfj0c2gHTcIBaAhHQJofYVj7Q9l1fZQoaAZHQHCbOYlY2bZoB0vfaAhHQJofl6Rhc7h1fZQoaAZHQG+YsAvL5h1oB0vIaAhHQJohIB6rvLJ1fZQoaAZHQHACW6ClJpZoB00HAWgIR0CaIe0Re1KHdX2UKGgGR0BxNbYAbQ1KaAdL82gIR0CaIoU34sVddX2UKGgGR0BnFcAHVwxWaAdN6ANoCEdAmiLWiDdxhnV9lChoBkdAdECX5FgDzWgHTSEBaAhHQJojSiWVu791fZQoaAZHQHFzI4hllK9oB0vJaAhHQJoj/gYP5Hp1fZQoaAZHQG9rBxgiNbVoB0vyaAhHQJoksNWluWN1fZQoaAZHQFzLHoX9BKNoB03oA2gIR0CaJSQlruYydX2UKGgGR0Bw+pFOO802aAdNTwFoCEdAmiUw22oegnV9lChoBkdAcUH/c32mHmgHS8hoCEdAmibMVpKzzHV9lChoBkdAN8CHZbpu/GgHS21oCEdAmidgc5sCT3V9lChoBkdAcg1RK6FuemgHS91oCEdAmieJA+pwTHV9lChoBkdAb1MmZVn27GgHS8BoCEdAmigE56t1ZHV9lChoBkdAcVrSIP9UCWgHS+toCEdAmijhGpda+3V9lChoB
kdAcOwF85S3s2gHS85oCEdAmilVmvnr6nV9lChoBkdAcvo0u14PgGgHTSIBaAhHQJopbijtXxR1fZQoaAZHQG25e67NB4VoB0vRaAhHQJop3DuSfUZ1fZQoaAZHQFrD4jbBXS1oB03oA2gIR0CaKzE+gUUPdX2UKGgGR0BxeMRGtp22aAdL2GgIR0CaK64Vh1DCdX2UKGgGR0BuKuLm6oVEaAdL+WgIR0CaLMewcHW0dX2UKGgGR0BxoONm16VuaAdNIwFoCEdAmi2snJDE33V9lChoBkdAb9u3G4qgAmgHS+hoCEdAmi4AsTWXknV9lChoBkdAcOBDIikftGgHS9xoCEdAmi5nTy8SPHV9lChoBkdAcPGEHMUypWgHS9poCEdAmi7ZCrtE5XV9lChoBkdAb7bQ2MsH0WgHS8doCEdAmi8aClJpWXV9lChoBkdAcB61gYxcmmgHS9NoCEdAmi/z2SMcZXV9lChoBkdAcT6LqlgtvmgHTSoBaAhHQJowp+G47Rx1fZQoaAZHQHBLDopx3mpoB0vfaAhHQJowyA4GUwB1fZQoaAZHQHFyPyXlbNdoB0v5aAhHQJoxEOZssQN1fZQoaAZHQGF0iV0Lc9JoB03oA2gIR0CaMjHRkVesdX2UKGgGR0BwtIA80UGnaAdL5mgIR0CaMw6ciGFjdX2UKGgGR0Bxp6EmICU5aAdLzWgIR0CaNF/ATIvKdX2UKGgGR0BwF9Tzd1uBaAdLx2gIR0CaNPqU/wAmdX2UKGgGR0BhZoWepXIVaAdN6ANoCEdAmjVsdYGMXXV9lChoBkdAclCRCx/us2gHTTkBaAhHQJo1bWWhRIl1fZQoaAZHQHKZiq6vq1RoB0u6aAhHQJo2MAhje9B1fZQoaAZHQHChXVwxWT5oB0vZaAhHQJo2Re5WilB1fZQoaAZHQHBzB1LamGdoB0vhaAhHQJo2QjhUBGR1fZQoaAZHQHChZZwGW2RoB0u9aAhHQJo2/+GXXy11fZQoaAZHQHMwJTMqz7doB00qAWgIR0CaN4ZezD4ydX2UKGgGR0BvB/OdGy5aaAdLyGgIR0CaN41Tzd1udX2UKGgGR0ByAForWiDeaAdL3mgIR0CaN8hmoR7JdX2UKGgGR0Bu86zmfXf7aAdL02gIR0CaOMuYx+KCdX2UKGgGR0BxJbkMkQf7aAdLtmgIR0CaOkQpF1B/dX2UKGgGR0ByYT2L5ylvaAdLy2gIR0CaO0nPmgandX2UKGgGR0BxD+4oZydXaAdLtWgIR0CaO2ZvDP4VdX2UKGgGR0Bwj6KDTSb6aAdLvGgIR0CaO7dJJ5E/dX2UKGgGR0Bw5smD15B1aAdLsGgIR0CaPDLSNOuadX2UKGgGR0Bwa6/CZWq+aAdL62gIR0CaPFiYsunNdX2UKGgGR0BAeriuMdcTaAdLlmgIR0CaPE25hBqsdX2UKGgGR0BkC2Hck+otaAdN6ANoCEdAmjzypm29c3V9lChoBkdAcF++DvmYB2gHS79oCEdAmj1D2i+L33V9lChoBkdAcAlwMH8jzWgHTQABaAhHQJo92dFvybx1fZQoaAZHQHBx7LlmvntoB0vZaAhHQJo+CuX/o7p1fZQoaAZHQG2jLWRRuTBoB0u/aAhHQJpAQtXgccV1fZQoaAZHQHJvyKR+z+poB0v4aAhHQJpAkNYr8SB1fZQoaAZHQHC3I8yN4qxoB0vIaAhHQJpBw4EOiFl1fZQoaAZHQGHg8zImw7loB03oA2gIR0CaQgKPXCj2dX2UKGgGR0BwLNYzSCvpaAdLwmgIR0CaQgLfDUExdX2UKGgGR0BybE9hZyMlaAdLsGgIR0CaQgJrcj7idX2UKGgGR0BxK+RzRx95aAdL2GgIR0CaQmGNaQmvdX2UKGgGR0BwAvwRXfZVaAdLv2gIR0CaQnnL7oB8dWUu"}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 310, "observation_space": {":type:": "<class 'gymnasium.spaces.box.Box'>", ":serialized:": "gAWVcAIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoB4wCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoECiWCAAAAAAAAAABAQEBAQEBAZRoFEsIhZRoGHSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBAoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaApLCIWUaBh0lFKUjARoaWdolGgQKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgKSwiFlGgYdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=", "dtype": "float32", "bounded_below": "[ True True True True True True True True]", "bounded_above": "[ True True True True True True True True]", "_shape": [8], "low": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. ]", "low_repr": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high_repr": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. 
]", "_np_random": null}, "action_space": {":type:": "<class 'gymnasium.spaces.discrete.Discrete'>", ":serialized:": "gAWV1QAAAAAAAACMGWd5bW5hc2l1bS5zcGFjZXMuZGlzY3JldGWUjAhEaXNjcmV0ZZSTlCmBlH2UKIwBbpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCaTiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIBAAAAAAAAACUhpRSlIwFc3RhcnSUaAhoDkMIAAAAAAAAAACUhpRSlIwGX3NoYXBllCloCmgOjApfbnBfcmFuZG9tlE51Yi4=", "n": "4", "start": "0", "_shape": [], "dtype": "int64", "_np_random": null}, "n_envs": 16, "n_steps": 2048, "gamma": 0.99, "gae_lambda": 0.95, "ent_coef": 0.0, "vf_coef": 0.5, "max_grad_norm": 0.5, "batch_size": 64, "n_epochs": 10, "clip_range": {":type:": "<class 'function'>", ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz/JmZmZmZmahZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "clip_range_vf": null, "normalize_advantage": true, "target_kl": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz8zqSowVTJhhZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "system_info": {"OS": "Linux-5.15.120+-x86_64-with-glibc2.35 # 1 SMP Wed Aug 30 11:19:59 UTC 2023", "Python": "3.10.12", "Stable-Baselines3": "2.0.0a5", "PyTorch": "2.0.1+cu118", "GPU Enabled": "True", "Numpy": "1.23.5", "Cloudpickle": "2.2.1", "Gymnasium": "0.28.1", "OpenAI Gym": "0.25.2"}}
+
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==", "__module__": "stable_baselines3.common.policies", "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function ActorCriticPolicy.__init__ at 0x79654eb65240>", "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x79654eb652d0>", "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x79654eb65360>", "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x79654eb653f0>", "_build": "<function ActorCriticPolicy._build at 0x79654eb65480>", "forward": "<function ActorCriticPolicy.forward at 0x79654eb65510>", "extract_features": "<function ActorCriticPolicy.extract_features at 0x79654eb655a0>", "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x79654eb65630>", "_predict": "<function ActorCriticPolicy._predict at 0x79654eb656c0>", "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x79654eb65750>", "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x79654eb657e0>", "predict_values": "<function ActorCriticPolicy.predict_values at 0x79654eb65870>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x79654eb07a00>"}, "verbose": 1, "policy_kwargs": {}, "num_timesteps": 1015808, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1696276637957944240, "learning_rate": 0.0003, "tensorboard_log": null, "_last_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": 
"gAWVdQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYAAgAAAAAAAA53lL7xITg/2/I2vtpaIb/UduO+bVwjPQAAAAAAAAAAei99vrxbUT597Wc+lUSHvvLqqL1rDMA9AAAAAAAAAACNHY89KYAquiK8yTfRIXWy3Tutu+5E6rYAAIA/AAAAAJpvi73eZ7o/5Slxvsfpgr6j+xS9RqoCvgAAAAAAAAAAmsd3Peyp/bnNFmy5VdgQtK78lToq7Iw4AACAPwAAgD+AjgG9gAWhP2ir171kQyC/gp2xvAZBeDwAAAAAAAAAAMB9L740xJu8NZl+OqlY0DgKfww+dsetuQAAgD8AAIA/Zqw9PPa4ProL9Fa4/7Dds1DB9To6QHk3AACAPwAAgD/NqKG9KWQTunMrGLipvrGzWI8FO59zMTcAAAAAAAAAADNrbrwJXSc9tp2ZvrcRMr7rjAS+QC4OvAAAAAAAAAAAc0aMPSFGuj3BPgq+exVqvnH8vTrupia9AAAAAAAAAADNIJq70t6Ku+CVpr2ilLI8BazKPPI/l70AAIA/AACAPwCQZDyPBnm6HS9AOzb2kTxrdts6YxB+PQAAgD8AAIA/trpbvpc3wT41zuI9MSK0vg2NBb463do9AAAAAAAAAAAA4+a8hYeHu9WIjT1vVLy6jKfkvDnBoLsAAIA/AACAP2Ytnj2L/ow/kHasPi5nRL9S9O49HpMPPgAAAAAAAAAAlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksQSwiGlIwBQ5R0lFKULg=="}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVgwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSxCFlIwBQ5R0lFKULg=="}, "_last_original_obs": null, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": -0.015808000000000044, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWV5gsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQGKh1qN6w+uMAWyUTegDjAF0lEdAuMfgbCJoCnV9lChoBkdAci1ytV7x/mgHS/5oCEdAuMfkRIz3y3V9lChoBkdAceDP5HmRvGgHS9toCEdAuMfs9kjHGXV9lChoBkdAcl/yGi5/b2gHS8NoCEdAuMf5uaWonHV9lChoBkdAcmoU/OdGzGgHTTsBaAhHQLjIA5IpYtB1fZQoaAZHQHHiJ7PY4AFoB0vhaAhHQLjIQiNbTtt1fZQoaAZHQHIsCQ5myxBoB0vaaAhHQLjIWh5gPVd1fZQoaAZHQHPvZjx0+1VoB0vLaAhHQLjIZG7Bfrt1fZQoaAZHQHJJED2alUJoB0vZaAhHQLjIcztCzC11fZQoaAZHQHLcUxM36yloB0vwaAhHQLjIgV7Qb+91fZQoaAZHQHBrm6bvw3JoB0uvaAhHQLjIj00WM0h1fZQoaAZHQG4argGbCrNoB0u7aAhHQLjIj5WBBiV1fZQoaAZHQHAzTf3vhIhoB0u7aAhHQLjInakhzNl1fZQoaAZHQHLVZXp4bCJoB0vFaAhHQLjIn1eSjg11fZQoaAZHQHDwRz3h4t9oB0vPaAhHQLjIyXdCVr11fZQoaAZHQHE1uWv8qF1oB0uvaAhHQLjI23evZAZ1fZQoaAZHQHEAr6P8yetoB0u4aAhHQLjI998JD3N1fZQoaAZHQHIt7655JK9oB0vhaAhHQLjJCJpFkQR1fZQoaAZHQHPMhnvlU6xoB0vXaAhHQLjJHaYu01J1fZQoaAZHQHECPmcOLBNoB0vMaAhHQLjJI1G9YfZ1fZQoaAZHQHKTLidat9xoB0vEaAhHQLjJYXJYDDF1fZQoaAZHQHLi95dGAkNoB0v3aAhHQLjJbR9w3o91fZQoaAZHQHJqnQUpNK1oB0viaAhHQLjJq6aLGaR1fZQoaAZHQHM3rp3X7LtoB0vZaAhHQLjJubY9Pk91fZQoaAZHQHGSmYOUdJdoB0u9aAhHQLjJvF4LThJ1fZQoaAZHQHMiIV2zOX5oB0voaAhHQLjJw3MY/FB1fZQoaAZHQHEz4kmhM8JoB0uoaAhHQLjJ0L9uP3l1fZQoaAZHQHKDsKXv6TJoB0vXaAhHQLjJ1h5xBE91fZQoaAZHQHNiKDPGACpoB0vQaAhHQLjJ3HavicZ1fZQoaAZHQHFE5OWSlnBoB0vqaAhHQLjJ7rqt5lh1fZQoaAZHQHHH2KQ7tAtoB0u5aAhHQLjJ95OJtSB1fZQoaAZHQHC9g1FYuChoB0u2aAhHQLjKGBBAv+R1fZQoaAZHQHHH6KP4mC1oB0u7aAhHQLjKOAyVObl1fZQoaAZHQHD8D+zdDY1oB0vHaAhHQLjKQ7Z39rJ1fZQoaAZHQHGbLUsnRb9oB0upaAhHQLjKW2Pkq+d1fZQoaAZHQG4tfWDpTuRoB0vHaAhHQLjKmGcnVoZ1fZQoaAZHQHFEP/JeVs1oB0vJaAhHQLjK5Ys/Y8N1fZQoaAZHQHFDBhx5s0poB0vJaAhHQLjK9kgwGnp1fZQoaAZHQHFpIl2NedFoB0u2aAhHQLjK+gUUO/d1fZQoaAZHQHCp482aUiZoB0vGaAhHQLjK+nmJWNp1fZQoaAZHQG+auogmqo9oB0vCaAhHQLjLAloDgZV1fZQoaAZHQHLMogRsdktoB0vWaAhHQLjLDi7Ciyp1fZQoaAZHQHOBO/cnE2poB0u5aAhHQLjLF5KODJ51fZQoaAZHQHI+1yimEXdoB0vTaAhHQLjLIloUSIx1fZQoaAZHQHIUnv2GqPxoB0vaaAhHQLjLU96C17Z1fZQoaAZHQHBVAn2IwdtoB0u4aAhHQLjLarLhaTx1fZQoaAZHQHCQdR3u/lBoB0u3aAhHQLjLka9sabZ1fZQoaAZHQG2HRfv4M4NoB02rAWgIR0C4y5hChN/OdX2UKGgGR0Bxlo3l0YCRaAdNCQFoCEdAuMvIJw84gnV9lChoBkdAcYx+IMz/ImgHS9VoCEdAuMv+BxxT9HV9lChoBkdAczqQ4jrzG2gHS8NoCEdAuMwizAvcrXV9lChoBkdAcOQF8ohIOGgHS8toCEdAuMw9LpRoAXV9lChoBkdAc4BW6K+BYmgHS
8poCEdAuMw+619fC3V9lChoBkdAcEfJN0vGqGgHTTwBaAhHQLjMTzF+/g11fZQoaAZHQHNzG3fAKv5oB0vbaAhHQLjMYaJhvzh1fZQoaAZHQHI+MwHqu8toB0vvaAhHQLjMeoNd7fJ1fZQoaAZHQHJbcG9pRGdoB0vaaAhHQLjMgZamoBJ1fZQoaAZHQHHBVX7tReloB0u8aAhHQLjMhUYsNDt1fZQoaAZHQHIFpIlMRHxoB0v6aAhHQLjMqWUKRdR1fZQoaAZHQG5OyE+PikxoB0uwaAhHQLjMrV81Gb11fZQoaAZHQG9C83++/QBoB0vQaAhHQLjMuZflZHN1fZQoaAZHQHHijqOcUdtoB0vjaAhHQLjNAOYYzi11fZQoaAZHQHEq9NJvo/1oB0vaaAhHQLjNJBacI7h1fZQoaAZHQHDlo/eLvThoB0uuaAhHQLjNNNtIkJN1fZQoaAZHQHIBH4oJAt5oB0vcaAhHQLjNWEWIoE11fZQoaAZHQHIqzbJwKjVoB0vDaAhHQLjNb7TlT3t1fZQoaAZHQHAZyqlxffJoB0vOaAhHQLjNlMWoFV11fZQoaAZHQHNoaVMVUMpoB0vjaAhHQLjNp1rqMWJ1fZQoaAZHQHAl2XPZ7HBoB0vQaAhHQLjNq7Dl5nl1fZQoaAZHQHGe8v24/eNoB0vSaAhHQLjNzqzZ6D51fZQoaAZHQHBXj4YaYNRoB0vYaAhHQLjN1AIY3vR1fZQoaAZHQG6mL0Bfa6BoB0vGaAhHQLjN5bu+h5B1fZQoaAZHQHFOdOdoWYZoB0vaaAhHQLjOBx1PnCB1fZQoaAZHQHHRr/jsD4hoB0v7aAhHQLjOEqyGBWh1fZQoaAZHQHOO3aBZpztoB0vwaAhHQLjOM+aBqbl1fZQoaAZHQHBXTBdld1NoB0u0aAhHQLjOQJdB0IV1fZQoaAZHQHCgknogV45oB0ukaAhHQLjOWsrupjt1fZQoaAZHQHKdp08vEjxoB0vsaAhHQLjOcy5qdpZ1fZQoaAZHQHKvrFbVz6toB0vXaAhHQLjOhgoPTXt1fZQoaAZHQHGHk9yLhrFoB0upaAhHQLjOmhTfixV1fZQoaAZHQGNpWEkB0ZFoB03oA2gIR0C4zp+cUdq+dX2UKGgGR0Bx1yvB7/n4aAdL3WgIR0C4zsXhS9/SdX2UKGgGR0BxSRqL0jC6aAdLy2gIR0C4zt3BciW3dX2UKGgGR0BxO3YukDZEaAdL02gIR0C4zu3iBGx2dX2UKGgGR0Bx1Vew9q1xaAdLzmgIR0C4zwc6q815dX2UKGgGR0Byba+mFajfaAdLtGgIR0C4zyClrM1TdX2UKGgGR0BynHD2rXDnaAdL0WgIR0C4zx8ebNKRdX2UKGgGR0BxyulJpWWAaAdLxmgIR0C4zy94u9OAdX2UKGgGR0Bzk55ooNNKaAdL82gIR0C4zz9p7CzkdX2UKGgGR0Bx92Df3vhIaAdLsmgIR0C4z0nMINVjdX2UKGgGR0ByLIMQVbiZaAdLrGgIR0C4z1qL876pdX2UKGgGR0BvkoT7EYO2aAdLvWgIR0C4z4s3qAz6dX2UKGgGR0ByolvHcUM5aAdL92gIR0C4z6U8vEjxdX2UKGgGR0BwiKKekHlfaAdLwGgIR0C4z6SR4hUzdX2UKGgGR0BwF3S5RTCMaAdLwGgIR0C4z7hJ7LMcdX2UKGgGR0ByBA7q6e5GaAdLuWgIR0C4z/YHxBmgdX2UKGgGR0BulumBOHnEaAdLwGgIR0C40BLN0NjLdX2UKGgGR0Bx3rz4DcM3aAdLy2gIR0C40ELHQyAQdX2UKGgGR0BxnE2vStvGaAdL+2gIR0C40E1ijL0SdX2UKGgGR0BuXHnuAqd6aAdLuWgIR0C40FRmCiAUdWUu"}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 372, "observation_space": {":type:": "<class 'gymnasium.spaces.box.Box'>", ":serialized:": "gAWVcAIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoB4wCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoECiWCAAAAAAAAAABAQEBAQEBAZRoFEsIhZRoGHSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBAoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaApLCIWUaBh0lFKUjARoaWdolGgQKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgKSwiFlGgYdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=", "dtype": "float32", "bounded_below": "[ True True True True True True True True]", "bounded_above": "[ True True True True True True True True]", "_shape": [8], "low": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. ]", "low_repr": "[-90. -90. -5. -5. -3.1415927 -5.\n -0. -0. ]", "high_repr": "[90. 90. 5. 5. 3.1415927 5.\n 1. 1. 
]", "_np_random": null}, "action_space": {":type:": "<class 'gymnasium.spaces.discrete.Discrete'>", ":serialized:": "gAWV1QAAAAAAAACMGWd5bW5hc2l1bS5zcGFjZXMuZGlzY3JldGWUjAhEaXNjcmV0ZZSTlCmBlH2UKIwBbpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCaTiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIBAAAAAAAAACUhpRSlIwFc3RhcnSUaAhoDkMIAAAAAAAAAACUhpRSlIwGX3NoYXBllCloCmgOjApfbnBfcmFuZG9tlE51Yi4=", "n": "4", "start": "0", "_shape": [], "dtype": "int64", "_np_random": null}, "n_envs": 16, "n_steps": 2048, "gamma": 0.99, "gae_lambda": 0.95, "ent_coef": 0.0, "vf_coef": 0.5, "max_grad_norm": 0.5, "batch_size": 64, "n_epochs": 12, "clip_range": {":type:": "<class 'function'>", ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz/JmZmZmZmahZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "clip_range_vf": null, "normalize_advantage": true, "target_kl": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz8zqSowVTJhhZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "system_info": {"OS": "Linux-5.15.120+-x86_64-with-glibc2.35 # 1 SMP Wed Aug 30 11:19:59 UTC 2023", "Python": "3.10.12", "Stable-Baselines3": "2.0.0a5", "PyTorch": "2.0.1+cu118", "GPU Enabled": "True", "Numpy": "1.23.5", "Cloudpickle": "2.2.1", "Gymnasium": "0.28.1", "OpenAI Gym": "0.25.2"}}
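Both versions of config.json serialize the same environment interface: an 8-dimensional Box observation space with the bounds shown above, a Discrete(4) action space, and the library versions listed under "system_info" (Gymnasium 0.28.1, Stable-Baselines3 2.0.0a5). A minimal sketch, not part of the commit, that recreates these spaces locally (assuming Gymnasium with its Box2D extra is installed):

```python
# Recreate the environment whose spaces are serialized in config.json.
import gymnasium as gym

env = gym.make("LunarLander-v2")

# Expected to match the serialized spaces: an 8-dimensional float32 Box
# (position, velocities, angle, angular velocity, leg contacts) and Discrete(4).
print(env.observation_space)
print(env.action_space)
```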
lander_model_ppo.zip
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:c8ff182300e10d0867b661f0b3fbbea63f0180af6b471bf180473a5a2e5a9f79
+size 146630
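The new lander_model_ppo.zip (146,630 bytes, tracked via Git LFS) is the Stable-Baselines3 save archive whose unpacked data, policy.pth and policy.optimizer.pth entries are diffed below, so it can be restored directly with PPO.load. A minimal usage sketch, assuming the file has been downloaded next to the script:

```python
import gymnasium as gym
from stable_baselines3 import PPO

# Restore the pushed checkpoint (policy weights plus the saved hyperparameters).
model = PPO.load("lander_model_ppo.zip")

# Roll out one deterministic episode with the restored policy.
env = gym.make("LunarLander-v2")
obs, _ = env.reset()
done = False
while not done:
    action, _ = model.predict(obs, deterministic=True)
    obs, reward, terminated, truncated, _ = env.step(action)
    done = terminated or truncated
env.close()
```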
lander_model_ppo/data
CHANGED
@@ -4,20 +4,20 @@
":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
-"__init__": "<function ActorCriticPolicy.__init__ at
-"_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at
-"reset_noise": "<function ActorCriticPolicy.reset_noise at
-"_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at
-"_build": "<function ActorCriticPolicy._build at
-"forward": "<function ActorCriticPolicy.forward at
-"extract_features": "<function ActorCriticPolicy.extract_features at
-"_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at
-"_predict": "<function ActorCriticPolicy._predict at
-"evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at
-"get_distribution": "<function ActorCriticPolicy.get_distribution at
-"predict_values": "<function ActorCriticPolicy.predict_values at
"__abstractmethods__": "frozenset()",
-"_abc_impl": "<_abc._abc_data object at
},
"verbose": 1,
"policy_kwargs": {},
@@ -26,12 +26,12 @@
"_num_timesteps_at_start": 0,
"seed": null,
"action_noise": null,
-"start_time":
"learning_rate": 0.0003,
"tensorboard_log": null,
"_last_obs": {
":type:": "<class 'numpy.ndarray'>",
-":serialized:": "
},
"_last_episode_starts": {
":type:": "<class 'numpy.ndarray'>",
@@ -45,13 +45,13 @@
"_stats_window_size": 100,
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
-":serialized:": "
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",
":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
},
-"_n_updates":
"observation_space": {
":type:": "<class 'gymnasium.spaces.box.Box'>",
":serialized:": "gAWVcAIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoB4wCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoECiWCAAAAAAAAAABAQEBAQEBAZRoFEsIhZRoGHSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBAoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaApLCIWUaBh0lFKUjARoaWdolGgQKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgKSwiFlGgYdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=",
@@ -84,7 +84,7 @@
"vf_coef": 0.5,
"max_grad_norm": 0.5,
"batch_size": 64,
-"n_epochs":
"clip_range": {
":type:": "<class 'function'>",
":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz/JmZmZmZmahZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"
@@ -4,20 +4,20 @@
":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
+"__init__": "<function ActorCriticPolicy.__init__ at 0x79654eb65240>",
+"_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x79654eb652d0>",
+"reset_noise": "<function ActorCriticPolicy.reset_noise at 0x79654eb65360>",
+"_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x79654eb653f0>",
+"_build": "<function ActorCriticPolicy._build at 0x79654eb65480>",
+"forward": "<function ActorCriticPolicy.forward at 0x79654eb65510>",
+"extract_features": "<function ActorCriticPolicy.extract_features at 0x79654eb655a0>",
+"_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x79654eb65630>",
+"_predict": "<function ActorCriticPolicy._predict at 0x79654eb656c0>",
+"evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x79654eb65750>",
+"get_distribution": "<function ActorCriticPolicy.get_distribution at 0x79654eb657e0>",
+"predict_values": "<function ActorCriticPolicy.predict_values at 0x79654eb65870>",
"__abstractmethods__": "frozenset()",
+"_abc_impl": "<_abc._abc_data object at 0x79654eb07a00>"
},
"verbose": 1,
"policy_kwargs": {},
@@ -26,12 +26,12 @@
"_num_timesteps_at_start": 0,
"seed": null,
"action_noise": null,
+"start_time": 1696276637957944240,
"learning_rate": 0.0003,
"tensorboard_log": null,
"_last_obs": {
":type:": "<class 'numpy.ndarray'>",
+
":serialized:": "gAWVdQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYAAgAAAAAAAA53lL7xITg/2/I2vtpaIb/UduO+bVwjPQAAAAAAAAAAei99vrxbUT597Wc+lUSHvvLqqL1rDMA9AAAAAAAAAACNHY89KYAquiK8yTfRIXWy3Tutu+5E6rYAAIA/AAAAAJpvi73eZ7o/5Slxvsfpgr6j+xS9RqoCvgAAAAAAAAAAmsd3Peyp/bnNFmy5VdgQtK78lToq7Iw4AACAPwAAgD+AjgG9gAWhP2ir171kQyC/gp2xvAZBeDwAAAAAAAAAAMB9L740xJu8NZl+OqlY0DgKfww+dsetuQAAgD8AAIA/Zqw9PPa4ProL9Fa4/7Dds1DB9To6QHk3AACAPwAAgD/NqKG9KWQTunMrGLipvrGzWI8FO59zMTcAAAAAAAAAADNrbrwJXSc9tp2ZvrcRMr7rjAS+QC4OvAAAAAAAAAAAc0aMPSFGuj3BPgq+exVqvnH8vTrupia9AAAAAAAAAADNIJq70t6Ku+CVpr2ilLI8BazKPPI/l70AAIA/AACAPwCQZDyPBnm6HS9AOzb2kTxrdts6YxB+PQAAgD8AAIA/trpbvpc3wT41zuI9MSK0vg2NBb463do9AAAAAAAAAAAA4+a8hYeHu9WIjT1vVLy6jKfkvDnBoLsAAIA/AACAP2Ytnj2L/ow/kHasPi5nRL9S9O49HpMPPgAAAAAAAAAAlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksQSwiGlIwBQ5R0lFKULg=="
},
"_last_episode_starts": {
":type:": "<class 'numpy.ndarray'>",
@@ -45,13 +45,13 @@
"_stats_window_size": 100,
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
+
":serialized:": "gAWV5gsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQGKh1qN6w+uMAWyUTegDjAF0lEdAuMfgbCJoCnV9lChoBkdAci1ytV7x/mgHS/5oCEdAuMfkRIz3y3V9lChoBkdAceDP5HmRvGgHS9toCEdAuMfs9kjHGXV9lChoBkdAcl/yGi5/b2gHS8NoCEdAuMf5uaWonHV9lChoBkdAcmoU/OdGzGgHTTsBaAhHQLjIA5IpYtB1fZQoaAZHQHHiJ7PY4AFoB0vhaAhHQLjIQiNbTtt1fZQoaAZHQHIsCQ5myxBoB0vaaAhHQLjIWh5gPVd1fZQoaAZHQHPvZjx0+1VoB0vLaAhHQLjIZG7Bfrt1fZQoaAZHQHJJED2alUJoB0vZaAhHQLjIcztCzC11fZQoaAZHQHLcUxM36yloB0vwaAhHQLjIgV7Qb+91fZQoaAZHQHBrm6bvw3JoB0uvaAhHQLjIj00WM0h1fZQoaAZHQG4argGbCrNoB0u7aAhHQLjIj5WBBiV1fZQoaAZHQHAzTf3vhIhoB0u7aAhHQLjInakhzNl1fZQoaAZHQHLVZXp4bCJoB0vFaAhHQLjIn1eSjg11fZQoaAZHQHDwRz3h4t9oB0vPaAhHQLjIyXdCVr11fZQoaAZHQHE1uWv8qF1oB0uvaAhHQLjI23evZAZ1fZQoaAZHQHEAr6P8yetoB0u4aAhHQLjI998JD3N1fZQoaAZHQHIt7655JK9oB0vhaAhHQLjJCJpFkQR1fZQoaAZHQHPMhnvlU6xoB0vXaAhHQLjJHaYu01J1fZQoaAZHQHECPmcOLBNoB0vMaAhHQLjJI1G9YfZ1fZQoaAZHQHKTLidat9xoB0vEaAhHQLjJYXJYDDF1fZQoaAZHQHLi95dGAkNoB0v3aAhHQLjJbR9w3o91fZQoaAZHQHJqnQUpNK1oB0viaAhHQLjJq6aLGaR1fZQoaAZHQHM3rp3X7LtoB0vZaAhHQLjJubY9Pk91fZQoaAZHQHGSmYOUdJdoB0u9aAhHQLjJvF4LThJ1fZQoaAZHQHMiIV2zOX5oB0voaAhHQLjJw3MY/FB1fZQoaAZHQHEz4kmhM8JoB0uoaAhHQLjJ0L9uP3l1fZQoaAZHQHKDsKXv6TJoB0vXaAhHQLjJ1h5xBE91fZQoaAZHQHNiKDPGACpoB0vQaAhHQLjJ3HavicZ1fZQoaAZHQHFE5OWSlnBoB0vqaAhHQLjJ7rqt5lh1fZQoaAZHQHHH2KQ7tAtoB0u5aAhHQLjJ95OJtSB1fZQoaAZHQHC9g1FYuChoB0u2aAhHQLjKGBBAv+R1fZQoaAZHQHHH6KP4mC1oB0u7aAhHQLjKOAyVObl1fZQoaAZHQHD8D+zdDY1oB0vHaAhHQLjKQ7Z39rJ1fZQoaAZHQHGbLUsnRb9oB0upaAhHQLjKW2Pkq+d1fZQoaAZHQG4tfWDpTuRoB0vHaAhHQLjKmGcnVoZ1fZQoaAZHQHFEP/JeVs1oB0vJaAhHQLjK5Ys/Y8N1fZQoaAZHQHFDBhx5s0poB0vJaAhHQLjK9kgwGnp1fZQoaAZHQHFpIl2NedFoB0u2aAhHQLjK+gUUO/d1fZQoaAZHQHCp482aUiZoB0vGaAhHQLjK+nmJWNp1fZQoaAZHQG+auogmqo9oB0vCaAhHQLjLAloDgZV1fZQoaAZHQHLMogRsdktoB0vWaAhHQLjLDi7Ciyp1fZQoaAZHQHOBO/cnE2poB0u5aAhHQLjLF5KODJ51fZQoaAZHQHI+1yimEXdoB0vTaAhHQLjLIloUSIx1fZQoaAZHQHIUnv2GqPxoB0vaaAhHQLjLU96C17Z1fZQoaAZHQHBVAn2IwdtoB0u4aAhHQLjLarLhaTx1fZQoaAZHQHCQdR3u/lBoB0u3aAhHQLjLka9sabZ1fZQoaAZHQG2HRfv4M4NoB02rAWgIR0C4y5hChN/OdX2UKGgGR0Bxlo3l0YCRaAdNCQFoCEdAuMvIJw84gnV9lChoBkdAcYx+IMz/ImgHS9VoCEdAuMv+BxxT9HV9lChoBkdAczqQ4jrzG2gHS8NoCEdAuMwizAvcrXV9lChoBkdAcOQF8ohIOGgHS8toCEdAuMw9LpRoAXV9lChoBkdAc4BW6K+BYmgHS8poCEdAuMw+619fC3V9lChoBkdAcEfJN0vGqGgHTTwBaAhHQLjMTzF+/g11fZQoaAZHQHNzG3fAKv5oB0vbaAhHQLjMYaJhvzh1fZQoaAZHQHI+MwHqu8toB0vvaAhHQLjMeoNd7fJ1fZQoaAZHQHJbcG9pRGdoB0vaaAhHQLjMgZamoBJ1fZQoaAZHQHHBVX7tReloB0u8aAhHQLjMhUYsNDt1fZQoaAZHQHIFpIlMRHxoB0v6aAhHQLjMqWUKRdR1fZQoaAZHQG5OyE+PikxoB0uwaAhHQLjMrV81Gb11fZQoaAZHQG9C83++/QBoB0vQaAhHQLjMuZflZHN1fZQoaAZHQHHijqOcUdtoB0vjaAhHQLjNAOYYzi11fZQoaAZHQHEq9NJvo/1oB0vaaAhHQLjNJBacI7h1fZQoaAZHQHDlo/eLvThoB0uuaAhHQLjNNNtIkJN1fZQoaAZHQHIBH4oJAt5oB0vcaAhHQLjNWEWIoE11fZQoaAZHQHIqzbJwKjVoB0vDaAhHQLjNb7TlT3t1fZQoaAZHQHAZyqlxffJoB0vOaAhHQLjNlMWoFV11fZQoaAZHQHNoaVMVUMpoB0vjaAhHQLjNp1rqMWJ1fZQoaAZHQHAl2XPZ7HBoB0vQaAhHQLjNq7Dl5nl1fZQoaAZHQHGe8v24/eNoB0vSaAhHQLjNzqzZ6D51fZQoaAZHQHBXj4YaYNRoB0vYaAhHQLjN1AIY3vR1fZQoaAZHQG6mL0Bfa6BoB0vGaAhHQLjN5bu+h5B1fZQoaAZHQHFOdOdoWYZoB0vaaAhHQLjOBx1PnCB1fZQoaAZHQHHRr/jsD4hoB0v7aAhHQLjOEqyGBWh1fZQoaAZHQHOO3aBZpztoB0vwaAhHQLjOM+aBqbl1fZQoaAZHQHBXTBdld1NoB0u0aAhHQLjOQJdB0IV1fZQoaAZHQHCgknogV45oB0ukaAhHQLjOWsrupjt1fZQoaAZHQHKdp08vEjxoB0vsaAhHQLjOcy5qdpZ1fZQoaAZHQHKvrFbVz6toB0vXaAhHQLjOhgoPTXt1fZQoaAZHQHGHk9yLhrFoB0upaAhHQLjOmhTfixV1fZQoaAZHQGNpWEkB0ZFoB03oA2gIR0C4zp+cUdq+dX2UKGgGR0Bx1yvB7/n4aAdL3WgIR0C4zsXhS9/SdX2UKGgGR0BxSRqL0jC6aAdLy2gIR0C4zt3BciW3dX2UKGgGR0BxO3YukDZEaAdL02gIR0C4zu3iBGx2dX2UKGgGR0Bx1Vew9q1xaAdLzmgIR0C4zwc6q815dX2UKGgGR0Byba+mFajfaAdLtGgIR0C4zyClrM1TdX2UKGgGR0BynHD2rXDnaAdL0W
gIR0C4zx8ebNKRdX2UKGgGR0BxyulJpWWAaAdLxmgIR0C4zy94u9OAdX2UKGgGR0Bzk55ooNNKaAdL82gIR0C4zz9p7CzkdX2UKGgGR0Bx92Df3vhIaAdLsmgIR0C4z0nMINVjdX2UKGgGR0ByLIMQVbiZaAdLrGgIR0C4z1qL876pdX2UKGgGR0BvkoT7EYO2aAdLvWgIR0C4z4s3qAz6dX2UKGgGR0ByolvHcUM5aAdL92gIR0C4z6U8vEjxdX2UKGgGR0BwiKKekHlfaAdLwGgIR0C4z6SR4hUzdX2UKGgGR0BwF3S5RTCMaAdLwGgIR0C4z7hJ7LMcdX2UKGgGR0ByBA7q6e5GaAdLuWgIR0C4z/YHxBmgdX2UKGgGR0BulumBOHnEaAdLwGgIR0C40BLN0NjLdX2UKGgGR0Bx3rz4DcM3aAdLy2gIR0C40ELHQyAQdX2UKGgGR0BxnE2vStvGaAdL+2gIR0C40E1ijL0SdX2UKGgGR0BuXHnuAqd6aAdLuWgIR0C40FRmCiAUdWUu"
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",
":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
},
+"_n_updates": 372,
"observation_space": {
":type:": "<class 'gymnasium.spaces.box.Box'>",
":serialized:": "gAWVcAIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWCAAAAAAAAAABAQEBAQEBAZRoB4wCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksIhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoECiWCAAAAAAAAAABAQEBAQEBAZRoFEsIhZRoGHSUUpSMBl9zaGFwZZRLCIWUjANsb3eUaBAoliAAAAAAAAAAAAC0wgAAtMIAAKDAAACgwNsPScAAAKDAAAAAgAAAAICUaApLCIWUaBh0lFKUjARoaWdolGgQKJYgAAAAAAAAAAAAtEIAALRCAACgQAAAoEDbD0lAAACgQAAAgD8AAIA/lGgKSwiFlGgYdJRSlIwIbG93X3JlcHKUjFtbLTkwLiAgICAgICAgLTkwLiAgICAgICAgIC01LiAgICAgICAgIC01LiAgICAgICAgIC0zLjE0MTU5MjcgIC01LgogIC0wLiAgICAgICAgIC0wLiAgICAgICBdlIwJaGlnaF9yZXBylIxTWzkwLiAgICAgICAgOTAuICAgICAgICAgNS4gICAgICAgICA1LiAgICAgICAgIDMuMTQxNTkyNyAgNS4KICAxLiAgICAgICAgIDEuICAgICAgIF2UjApfbnBfcmFuZG9tlE51Yi4=",
@@ -84,7 +84,7 @@
"vf_coef": 0.5,
"max_grad_norm": 0.5,
"batch_size": 64,
+"n_epochs": 12,
"clip_range": {
":type:": "<class 'function'>",
":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuEQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz/JmZmZmZmahZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"
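The data diff above pins down the training configuration: 16 parallel environments, n_steps=2048, batch_size=64, gamma=0.99, gae_lambda=0.95, ent_coef=0.0, vf_coef=0.5, max_grad_norm=0.5, a constant learning rate of 3e-4, roughly one million timesteps, and n_epochs raised from 10 to 12 in this commit. A rough reconstruction of such a run (a sketch, not the author's actual training script; clip_range is stored only as a serialized constant schedule and is assumed here to be the SB3 default of 0.2):

```python
from stable_baselines3 import PPO
from stable_baselines3.common.env_util import make_vec_env

# 16 parallel copies of the environment, matching "n_envs": 16.
vec_env = make_vec_env("LunarLander-v2", n_envs=16)

model = PPO(
    "MlpPolicy",
    vec_env,
    n_steps=2048,
    batch_size=64,
    n_epochs=12,        # bumped from 10 to 12 in this commit
    gamma=0.99,
    gae_lambda=0.95,
    ent_coef=0.0,
    vf_coef=0.5,
    max_grad_norm=0.5,
    learning_rate=3e-4,
    clip_range=0.2,     # assumption: the serialized constant schedule is the 0.2 default
    verbose=1,
)

# "_total_timesteps": 1000000; the recorded counter ends at 1015808 because each
# rollout collects 16 envs x 2048 steps = 32768 transitions (31 rollouts total).
model.learn(total_timesteps=1_000_000)
model.save("lander_model_ppo")  # produces lander_model_ppo.zip
```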
lander_model_ppo/policy.optimizer.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:a74a577555af6c5492a43a8e8619dc54c36c70839cf226615d8a76ba1431e032
 size 87929
lander_model_ppo/policy.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:6cfc74017ebe8fb4fa90281ee872fd2099ebc09c8419ab9ecd44e4c028b6681f
 size 43329
replay.mp4
CHANGED
Binary files a/replay.mp4 and b/replay.mp4 differ
results.json
CHANGED
@@ -1 +1 @@
-{"mean_reward":
+{"mean_reward": 270.69714910000005, "std_reward": 17.687238632459874, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2023-10-02T20:25:49.097574"}
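results.json now stores the evaluation behind the README metric: mean and standard deviation of the episodic return over 10 deterministic episodes. A minimal sketch of how such numbers are typically produced with Stable-Baselines3 (the exact evaluation harness used for this push is not part of the diff):

```python
import json
from datetime import datetime

import gymnasium as gym
from stable_baselines3 import PPO
from stable_baselines3.common.evaluation import evaluate_policy
from stable_baselines3.common.monitor import Monitor

model = PPO.load("lander_model_ppo.zip")
eval_env = Monitor(gym.make("LunarLander-v2"))

# 10 deterministic episodes, matching "n_eval_episodes" / "is_deterministic".
mean_reward, std_reward = evaluate_policy(
    model, eval_env, n_eval_episodes=10, deterministic=True
)

with open("results.json", "w") as f:
    json.dump(
        {
            "mean_reward": mean_reward,
            "std_reward": std_reward,
            "is_deterministic": True,
            "n_eval_episodes": 10,
            "eval_datetime": datetime.now().isoformat(),
        },
        f,
    )

# The README metric string is the rounded pair, e.g. "270.70 +/- 17.69".
print(f"{mean_reward:.2f} +/- {std_reward:.2f}")
```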