Amankankriya committed
Commit: 1c00157
Parent: fe44d40

trained model for HalfCheetah-v4 using DDPG

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ replay.mp4 filter=lfs diff=lfs merge=lfs -text
DDPG-HalfCheetah-v4.zip ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f39c48699fca92312a2d88cf9e8ee38dcbf4e2eddef794eb2a3e6911ff11a532
+ size 665707
DDPG-HalfCheetah-v4/_stable_baselines3_version ADDED
@@ -0,0 +1 @@
+ 2.3.2
DDPG-HalfCheetah-v4/actor.optimizer.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a2fb48218087cf9dfc1dc1384906aaf6d4c8685cd9ea537945eff479e343f393
+ size 161952
DDPG-HalfCheetah-v4/critic.optimizer.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:db5993dd9baa61a22f4aee1aa1f6903f7fead1df2987aa799f52fe4fbe17483b
+ size 162976
DDPG-HalfCheetah-v4/data ADDED
@@ -0,0 +1,139 @@
+ {
+ "policy_class": {
+ ":type:": "<class 'abc.ABCMeta'>",
+ ":serialized:": "gAWVMAAAAAAAAACMHnN0YWJsZV9iYXNlbGluZXMzLnRkMy5wb2xpY2llc5SMCVREM1BvbGljeZSTlC4=",
+ "__module__": "stable_baselines3.td3.policies",
+ "__annotations__": "{'actor': <class 'stable_baselines3.td3.policies.Actor'>, 'actor_target': <class 'stable_baselines3.td3.policies.Actor'>, 'critic': <class 'stable_baselines3.common.policies.ContinuousCritic'>, 'critic_target': <class 'stable_baselines3.common.policies.ContinuousCritic'>}",
+ "__doc__": "\n Policy class (with both actor and critic) for TD3.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n :param n_critics: Number of critic networks to create.\n :param share_features_extractor: Whether to share or not the features extractor\n between the actor and the critic (this saves computation time)\n ",
+ "__init__": "<function TD3Policy.__init__ at 0x3181c6f70>",
+ "_build": "<function TD3Policy._build at 0x3181d2040>",
+ "_get_constructor_parameters": "<function TD3Policy._get_constructor_parameters at 0x3181d20d0>",
+ "make_actor": "<function TD3Policy.make_actor at 0x3181d2160>",
+ "make_critic": "<function TD3Policy.make_critic at 0x3181d21f0>",
+ "forward": "<function TD3Policy.forward at 0x3181d2280>",
+ "_predict": "<function TD3Policy._predict at 0x3181d2310>",
+ "set_training_mode": "<function TD3Policy.set_training_mode at 0x3181d23a0>",
+ "__abstractmethods__": "frozenset()",
+ "_abc_impl": "<_abc._abc_data object at 0x3181ceb00>"
+ },
+ "verbose": 1,
+ "policy_kwargs": {
+ ":type:": "<class 'dict'>",
+ ":serialized:": "gAWVcwAAAAAAAAB9lCiMDWFjdGl2YXRpb25fZm6UjBt0b3JjaC5ubi5tb2R1bGVzLmFjdGl2YXRpb26UjARSZUxVlJOUjAhuZXRfYXJjaJR9lCiMAnBplF2UKEuAS4BljAJxZpRdlChLgEuAZXWMCW5fY3JpdGljc5RLAXUu",
+ "activation_fn": "<class 'torch.nn.modules.activation.ReLU'>",
+ "net_arch": {
+ "pi": [
+ 128,
+ 128
+ ],
+ "qf": [
+ 128,
+ 128
+ ]
+ },
+ "n_critics": 1
+ },
+ "num_timesteps": 2395407,
+ "_total_timesteps": 5000000,
+ "_num_timesteps_at_start": 0,
+ "seed": null,
+ "action_noise": null,
+ "start_time": 1718010746217830000,
+ "learning_rate": 0.00032,
+ "tensorboard_log": null,
+ "_last_obs": {
+ ":type:": "<class 'numpy.ndarray'>",
+ ":serialized:": "gAWV/QAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJaIAAAAAAAAAHih2mYmqrS/86IAZbZ7xz+dM073Ki26PxTyLbGc5OG/m3QE+K/31L80vwbXTajKPyMfRx2sWeU/t75ru+oU4j+K3q8Wy6giQGj/J3leDfI/b1w8ZikU4T/n6zwoEpczwK20YHYP+QPAMx4myfBLH8CJ9O0Z2+ogwOjsRcYcBjhA0FkoVgRZ/L+UjAVudW1weZSMBWR0eXBllJOUjAJmOJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiSwFLEYaUjAFDlHSUUpQu"
+ },
+ "_last_episode_starts": {
+ ":type:": "<class 'numpy.ndarray'>",
+ ":serialized:": "gAWVdAAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYBAAAAAAAAAAGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwGFlIwBQ5R0lFKULg=="
+ },
+ "_last_original_obs": {
+ ":type:": "<class 'numpy.ndarray'>",
+ ":serialized:": "gAWV/QAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJaIAAAAAAAAAP9Crtkevr6/ETnZnk4duD+8FddlnvLpP1fZ6tONh9C/rqE/FApcvT+/oQxzgODkP/NM1Ow68+G/HMMxEjbKwD+AbhcxXisoQGWJywbcTuu/36X1Xv1p/j+HriW9D47/P6toFUmuES/A6sTxFD0HGsATx4iH4AHfv6xepTclCTNAE3KaC56UJkCUjAVudW1weZSMBWR0eXBllJOUjAJmOJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiSwFLEYaUjAFDlHSUUpQu"
+ },
+ "_episode_num": 2395,
+ "use_sde": false,
+ "sde_sample_freq": -1,
+ "_current_progress_remaining": 0.5209186,
+ "_stats_window_size": 100,
+ "ep_info_buffer": {
+ ":type:": "<class 'collections.deque'>",
+ ":serialized:": "gAWVRAwAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQMGh3nw5NoKMAWyUTegDjAF0lEdAt5aum78Nx3V9lChoBkdAlNeN7fHgg2gHTegDaAhHQLeZBP9kz411fZQoaAZHQMIk7VdonKJoB03oA2gIR0C3m1NeQdS3dX2UKGgGR0DBykkiwB5paAdN6ANoCEdAt52q0MPSUnV9lChoBkdAwZ2gW6bvw2gHTegDaAhHQLegCMAFPi11fZQoaAZHQMGr8WUbDMxoB03oA2gIR0C3olotUXHjdX2UKGgGR0DCEWn/o7muaAdN6ANoCEdAt6SoRDkU9XV9lChoBkdAwrc0WmgrY2gHTegDaAhHQLem+vM8ox51fZQoaAZHQMLGm5Etuk1oB03oA2gIR0C3qUxWgezVdX2UKGgGR0DChyBSLqD9aAdN6ANoCEdAt6udMlC1JHV9lChoBkdAwYihMdLg42gHTegDaAhHQLet6ltCRfZ1fZQoaAZHQMMOP/W1+iJoB03oA2gIR0C3sDdfgJkYdX2UKGgGR0DCviTZOBUaaAdN6ANoCEdAt7KMovzvqnV9lChoBkdAwtKqnhsImmgHTegDaAhHQLe04ef7Jnx1fZQoaAZHQMJ4CV2JSBNoB03oA2gIR0C3tzFpXZGsdX2UKGgGR0DCPj5/EwWWaAdN6ANoCEdAt7mF45cTrXV9lChoBkdAwn+qYXO4X2gHTegDaAhHQLe72MY/FBJ1fZQoaAZHQMD5BS9VWCFoB03oA2gIR0C3vinmzSkTdX2UKGgGR0DCR3/UONHZaAdN6ANoCEdAt8CBHxz7uXV9lChoBkdAwoofE+gUUWgHTegDaAhHQLfC1pWV/tp1fZQoaAZHQMK/Vc4o7V9oB03oA2gIR0C3xSdjwx33dX2UKGgGR0DCCCTWK/EgaAdN6ANoCEdAt8eDq4YrKHV9lChoBkdAwkeXUFSsKmgHTegDaAhHQLfJ1+Idlup1fZQoaAZHQMJ9tYQSSNhoB03oA2gIR0C3zCz7uUlidX2UKGgGR0DCeZKSgXdkaAdN6ANoCEdAt85/tBv733V9lChoBkdAwn9EFHrhSGgHTegDaAhHQLfQ1edkJ8h1fZQoaAZHQMJpSsySFGpoB03oA2gIR0C30yqioKlYdX2UKGgGR0DCV6e87IT5aAdN6ANoCEdAt9V/IhhYvHV9lChoBkdAwkGBogV45mgHTegDaAhHQLfXz/d69kB1fZQoaAZHQMKH5ZGjKxNoB03oA2gIR0C32iAqiGnGdX2UKGgGR0DCG7xiXpnpaAdN6ANoCEdAt9xw+iaiK3V9lChoBkdAwrotOWSlnGgHTegDaAhHQLfewANXo1V1fZQoaAZHQMJh4aFM7EJoB03oA2gIR0C34RBASnLrdX2UKGgGR0DBokiZYxL1aAdN6ANoCEdAt+NgsiB5HHV9lChoBkdAwjGFohY/3WgHTegDaAhHQLflq4D9wWF1fZQoaAZHQMF1kvkili1oB03oA2gIR0C35/WCZnctdX2UKGgGR0DCHzrMA3kxaAdN6ANoCEdAt+pAymALA3V9lChoBkdAwjW/8JD3NGgHTegDaAhHQLfslufVZs91fZQoaAZHQMHtvDhcZ+BoB03oA2gIR0C37uO9rXUZdX2UKGgGR0DCZY8pobn6aAdN6ANoCEdAt/E8qqfe13V9lChoBkdAwhsSgq3EymgHTegDaAhHQLfzl9NN8E51fZQoaAZHQMIcOCQLeANoB03oA2gIR0C39e3pB5X2dX2UKGgGR0DCaK8nZ00WaAdN6ANoCEdAt/hAxfv4NHV9lChoBkdAwf57ERaouWgHTegDaAhHQLf6lH6uW8h1fZQoaAZHQMKHBVbzK9xoB03oA2gIR0C3/OuUyHmBdX2UKGgGR0DDHXC7kGRnaAdN6ANoCEdAt/8/gKnei3V9lChoBkdAwivX4YaYNWgHTegDaAhHQLgBkyJbdJt1fZQoaAZHQMH45Bw++uhoB03oA2gIR0C4A+fT5O8DdX2UKGgGR0DBdN7UExIraAdN6ANoCEdAuAZBHUc4pHV9lChoBkdAwkc9c1wYL2gHTegDaAhHQLgInjjJdSl1fZQoaAZHQMF7DQE6kqNoB03oA2gIR0C4CvL8ejmCdX2UKGgGR0DCAMp6Uqx1aAdN6ANoCEdAuA1I8p1A7nV9lChoBkdAwdNwUahpQGgHTegDaAhHQLgPnqDsdDJ1fZQoaAZHQMH3ntEw35xoB03oA2gIR0C4Efd+5OJtdX2UKGgGR0DCGu0xj8UFaAdN6ANoCEdAuBRN2t+1B3V9lChoBkdAwcGnC/GlymgHTegDaAhHQLgWputwJgN1fZQoaAZHQMHJELjxTbZoB03oA2gIR0C4GQBdpqREdX2UKGgGR0DCUBJ8neBQaAdN6ANoCEdAuBtZUZNwi3V9lChoBkdAwjvDEKE39GgHTegDaAhHQLgdsDuSfUZ1fZQoaAZHQMFRjanBLwpoB03oA2gIR0C4IAY6wMYudX2UKGgGR0DCxi5f2K2saAdN6ANoCEdAuCJaQYDT0HV9lChoBkdAwntz/zasZGgHTegDaAhHQLgks9zwMH91fZQoaAZHQMJbtotL+P1oB03oA2gIR0C4JwaIWP92dX2UKGgGR0DCOwfvSc9XaAdN6ANoCEdAuCldGpda+3V9lChoBkdAwtARa1TisGgHTegDaAhHQLgrtPBSDRN1fZQoaAZHQMHQtv0Zm7JoB03oA2gIR0C4LgpzHS4OdX2UKGgGR0DCjrsaKk2xaAdN6ANoCEdAuDBfSQYDT3V9lChoBkdAwqB4xyGSIWgHTegDaAhHQLgys1DjR2N1fZQoaAZHQMGwsoLG7z1oB03oA2gIR0C4NREdRzikdX2UKGgGR0Cm0eWTgVGkaAdN6ANoCEdAuDdrXtjTa3V9lChoBkdAwpwvTDwYtWgHTegDaAhHQLg5vwA2hqV1fZQoaAZHQMLKDpnHvMNoB03oA2gIR0C4PBWw7kn1dX2UKGgGR0DB84yt/4IsaAdN6ANoCEdAuD5qp4rz5HV9lChoBkdAwnrj9qDbrWgHTegDaAhHQLhAyapgkTp1fZQoaAZHQMIuXSn+AEtoB03oA2gIR0C4QyF2icoZdX2UKGgGR0DCxFugctGvaAdN6ANoCEdAuEV6fWcz7HV9lChoBkdAwgQj0L+glGgHTegDaAhHQLhH08W9DhN1fZQoaAZHQMMpWhCD28JoB03oA2gIR0C4Si5XuE26dX2UKGgGR0DC9R3bXYlIaAdN6ANoCEdAuEyETviLl3V9lChoBkdAwtEbukUKzGgHTegDaAhHQLhO3OZ9d/t1fZQoaAZHQMLCMKYiPhhoB03oA2gIR0C4UTIChew+dX2UKGgGR0DCC1XYL9deaAdN6ANoCEdAuFOLLr5ZbXV9lChoBkdAwUGDst03fmgHTegDaAhHQLhV4Z8KG+N1fZQoaAZHQMIIVbAckt5oB03oA2gIR0C4WDnEyckM
dX2UKGgGR0DCdBZnrY5DaAdN6ANoCEdAuFqPWy1NQHV9lChoBkdAwvbrQcghbGgHTegDaAhHQLhc+nHNorZ1fZQoaAZHQMIUALkS26VoB03oA2gIR0C4X1JmEoOQdX2UKGgGR0DCXO3vhIe6aAdN6ANoCEdAuGGqYQarFXV9lChoBkdAwkWfdPci4mgHTegDaAhHQLhkAlqJuVJ1fZQoaAZHQMF9Bt6PbPBoB03oA2gIR0C4ZmBib2DhdX2UKGgGR0DCrAj8ejmCaAdN6ANoCEdAuGi6zqrzXnV9lChoBkdAws3Ag4ffXWgHTegDaAhHQLhrF9JBgNR1fZQoaAZHQMHeJbyhBZ9oB03oA2gIR0C4bXVXq7iAdX2UKGgGR0DCPS9VcUudaAdN6ANoCEdAuG/VddE9dXV9lChoBkdAwhceS/0ulGgHTegDaAhHQLhyMruYx+N1fZQoaAZHQMIRdWrOqvNoB03oA2gIR0C4dKukHlfadX2UKGgGR0Ci6WiZF5OaaAdN6ANoCEdAuHcFJNCZ4XV9lChoBkdAwlZuAmzBymgHTegDaAhHQLh5YsMRYih1fZQoaAZHQMFd6xBu4w1oB03oA2gIR0C4h4BODaoNdX2UKGgGR0DCm83TEzfraAdN6ANoCEdAuInWKNyYHHVlLg=="
+ },
+ "ep_success_buffer": {
+ ":type:": "<class 'collections.deque'>",
+ ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
+ },
+ "_n_updates": 2395307,
+ "observation_space": {
+ ":type:": "<class 'gymnasium.spaces.box.Box'>",
+ ":serialized:": "gAWVsQIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY4lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWEQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJRoCIwCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksRhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoESiWEQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJRoFUsRhZRoGXSUUpSMBl9zaGFwZZRLEYWUjANsb3eUaBEologAAAAAAAAAAAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/5RoC0sRhZRoGXSUUpSMBGhpZ2iUaBEologAAAAAAAAAAAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwf5RoC0sRhZRoGXSUUpSMCGxvd19yZXBylIwELWluZpSMCWhpZ2hfcmVwcpSMA2luZpSMCl9ucF9yYW5kb22UTnViLg==",
+ "dtype": "float64",
+ "bounded_below": "[False False False False False False False False False False False False\n False False False False False]",
+ "bounded_above": "[False False False False False False False False False False False False\n False False False False False]",
+ "_shape": [
+ 17
+ ],
+ "low": "[-inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf\n -inf -inf -inf]",
+ "high": "[inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf]",
+ "low_repr": "-inf",
+ "high_repr": "inf",
+ "_np_random": null
+ },
+ "action_space": {
+ ":type:": "<class 'gymnasium.spaces.box.Box'>",
+ ":serialized:": "gAWVfwIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWBgAAAAAAAAABAQEBAQGUaAiMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLBoWUjAFDlHSUUpSMDWJvdW5kZWRfYWJvdmWUaBEolgYAAAAAAAAAAQEBAQEBlGgVSwaFlGgZdJRSlIwGX3NoYXBllEsGhZSMA2xvd5RoESiWGAAAAAAAAAAAAIC/AACAvwAAgL8AAIC/AACAvwAAgL+UaAtLBoWUaBl0lFKUjARoaWdolGgRKJYYAAAAAAAAAAAAgD8AAIA/AACAPwAAgD8AAIA/AACAP5RoC0sGhZRoGXSUUpSMCGxvd19yZXBylIwELTEuMJSMCWhpZ2hfcmVwcpSMAzEuMJSMCl9ucF9yYW5kb22UjBRudW1weS5yYW5kb20uX3BpY2tsZZSMEF9fZ2VuZXJhdG9yX2N0b3KUk5SMBVBDRzY0lGgyjBRfX2JpdF9nZW5lcmF0b3JfY3RvcpSTlIaUUpR9lCiMDWJpdF9nZW5lcmF0b3KUjAVQQ0c2NJSMBXN0YXRllH2UKGg9ihA6z5GHrnLfBeG/QkZaiCUzjANpbmOUihGlluXzqdyrAUtuDLN5lNnYAHWMCmhhc191aW50MzKUSwCMCHVpbnRlZ2VylEsAdWJ1Yi4=",
+ "dtype": "float32",
+ "bounded_below": "[ True True True True True True]",
+ "bounded_above": "[ True True True True True True]",
+ "_shape": [
+ 6
+ ],
+ "low": "[-1. -1. -1. -1. -1. -1.]",
+ "high": "[1. 1. 1. 1. 1. 1.]",
+ "low_repr": "-1.0",
+ "high_repr": "1.0",
+ "_np_random": "Generator(PCG64)"
+ },
+ "n_envs": 1,
+ "buffer_size": 1000000,
+ "batch_size": 100,
+ "learning_starts": 100,
+ "tau": 0.05,
+ "gamma": 0.99,
+ "gradient_steps": 1,
+ "optimize_memory_usage": false,
+ "replay_buffer_class": {
+ ":type:": "<class 'abc.ABCMeta'>",
+ ":serialized:": "gAWVNQAAAAAAAACMIHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5idWZmZXJzlIwMUmVwbGF5QnVmZmVylJOULg==",
+ "__module__": "stable_baselines3.common.buffers",
+ "__annotations__": "{'observations': <class 'numpy.ndarray'>, 'next_observations': <class 'numpy.ndarray'>, 'actions': <class 'numpy.ndarray'>, 'rewards': <class 'numpy.ndarray'>, 'dones': <class 'numpy.ndarray'>, 'timeouts': <class 'numpy.ndarray'>}",
+ "__doc__": "\n Replay buffer used in off-policy algorithms like SAC/TD3.\n\n :param buffer_size: Max number of element in the buffer\n :param observation_space: Observation space\n :param action_space: Action space\n :param device: PyTorch device\n :param n_envs: Number of parallel environments\n :param optimize_memory_usage: Enable a memory efficient variant\n of the replay buffer which reduces by almost a factor two the memory used,\n at a cost of more complexity.\n See https://github.com/DLR-RM/stable-baselines3/issues/37#issuecomment-637501195\n and https://github.com/DLR-RM/stable-baselines3/pull/28#issuecomment-637559274\n Cannot be used in combination with handle_timeout_termination.\n :param handle_timeout_termination: Handle timeout termination (due to timelimit)\n separately and treat the task as infinite horizon task.\n https://github.com/DLR-RM/stable-baselines3/issues/284\n ",
+ "__init__": "<function ReplayBuffer.__init__ at 0x318100a60>",
+ "add": "<function ReplayBuffer.add at 0x318100af0>",
+ "sample": "<function ReplayBuffer.sample at 0x318100b80>",
+ "_get_samples": "<function ReplayBuffer._get_samples at 0x318100c10>",
+ "_maybe_cast_dtype": "<staticmethod object at 0x3180f6a90>",
+ "__abstractmethods__": "frozenset()",
+ "_abc_impl": "<_abc._abc_data object at 0x3180fee80>"
+ },
+ "replay_buffer_kwargs": {},
+ "train_freq": {
+ ":type:": "<class 'stable_baselines3.common.type_aliases.TrainFreq'>",
+ ":serialized:": "gAWVYQAAAAAAAACMJXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi50eXBlX2FsaWFzZXOUjAlUcmFpbkZyZXGUk5RLAWgAjBJUcmFpbkZyZXF1ZW5jeVVuaXSUk5SMBHN0ZXCUhZRSlIaUgZQu"
+ },
+ "use_sde_at_warmup": false,
+ "policy_delay": 1,
+ "target_noise_clip": 0.0,
+ "target_policy_noise": 0.1,
+ "lr_schedule": {
+ ":type:": "<class 'function'>",
+ ":serialized:": "gAWVwwMAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLA0sTQwx0AIgAfACDAYMBUwCUToWUjAVmbG9hdJSFlIwScHJvZ3Jlc3NfcmVtYWluaW5nlIWUjGovVXNlcnMvYW1hbmthbmtyaXlhL0RvY3VtZW50cy9EZWVwIFJML2RlZXBybC9saWIvcHl0aG9uMy45L3NpdGUtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lIwIPGxhbWJkYT6US2FDAJSMDnZhbHVlX3NjaGVkdWxllIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMai9Vc2Vycy9hbWFua2Fua3JpeWEvRG9jdW1lbnRzL0RlZXAgUkwvZGVlcHJsL2xpYi9weXRob24zLjkvc2l0ZS1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUdU5OaACMEF9tYWtlX2VtcHR5X2NlbGyUk5QpUpSFlHSUUpRoAIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaCF9lH2UKGgYaA+MDF9fcXVhbG5hbWVfX5SMIWdldF9zY2hlZHVsZV9mbi48bG9jYWxzPi48bGFtYmRhPpSMD19fYW5ub3RhdGlvbnNfX5R9lIwOX19rd2RlZmF1bHRzX1+UTowMX19kZWZhdWx0c19flE6MCl9fbW9kdWxlX1+UaBmMB19fZG9jX1+UTowLX19jbG9zdXJlX1+UaACMCl9tYWtlX2NlbGyUk5RoAihoByhLAUsASwBLAUsBSxNDBIgAUwCUaAkpjAFflIWUaA6MBGZ1bmOUS4VDAgABlIwDdmFslIWUKXSUUpRoFU5OaB0pUpSFlHSUUpRoI2g9fZR9lChoGGg0aCaMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUaCh9lGgqTmgrTmgsaBloLU5oLmgwRz80+LWI42jxhZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjCFlFKUhZRoRV2UaEd9lHWGlIZSMC4="
+ },
+ "actor_batch_norm_stats": [],
+ "critic_batch_norm_stats": [],
+ "actor_batch_norm_stats_target": [],
+ "critic_batch_norm_stats_target": []
+ }
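
The data file above records the run's hyperparameters in plain JSON: learning_rate 0.00032, net_arch [128, 128] for both the actor (pi) and the critic (qf), n_critics 1, buffer_size 1000000, batch_size 100, learning_starts 100, tau 0.05, gamma 0.99, one gradient step per environment step, no action noise, and a 5000000-step budget (the checkpoint was written at num_timesteps 2395407). The commit does not include the training script; the following is only a minimal sketch of how this configuration could be set up with Stable-Baselines3, not the author's actual code:

```python
# Sketch only: rebuilds the hyperparameters stored in the "data" file above.
# The original training script is not part of this commit.
import gymnasium as gym
from stable_baselines3 import DDPG

env = gym.make("HalfCheetah-v4")  # requires gymnasium[mujoco]

model = DDPG(
    "MlpPolicy",
    env,
    learning_rate=0.00032,
    buffer_size=1_000_000,
    learning_starts=100,
    batch_size=100,
    tau=0.05,
    gamma=0.99,
    train_freq=1,        # TrainFreq(1, "step") in the saved data
    gradient_steps=1,
    action_noise=None,   # action_noise is null in the saved data
    policy_kwargs=dict(net_arch=dict(pi=[128, 128], qf=[128, 128]), n_critics=1),
    verbose=1,
)

# _total_timesteps is 5000000; this checkpoint was saved partway through,
# at num_timesteps 2395407.
model.learn(total_timesteps=5_000_000)
model.save("DDPG-HalfCheetah-v4")
```

Note that Stable-Baselines3 implements DDPG on top of its TD3 code path, which is why the saved policy_class is TD3Policy and fields such as policy_delay and target_policy_noise appear in the data even though the algorithm is DDPG.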
DDPG-HalfCheetah-v4/policy.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1a61bd4a13820479fdae50c8cf49401c7a9f6e529c0994bec62bb3a57c531047
+ size 322638
DDPG-HalfCheetah-v4/pytorch_variables.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ebdad4b9cfe9cd22a3abadb5623bf7bb1f6eb2e408740245eb3f2044b0adc018
+ size 864
DDPG-HalfCheetah-v4/system_info.txt ADDED
@@ -0,0 +1,8 @@
+ - OS: macOS-14.5-arm64-arm-64bit Darwin Kernel Version 23.5.0: Wed May 1 20:19:05 PDT 2024; root:xnu-10063.121.3~5/RELEASE_ARM64_T8112
+ - Python: 3.9.6
+ - Stable-Baselines3: 2.3.2
+ - PyTorch: 2.3.0
+ - GPU Enabled: False
+ - Numpy: 1.26.4
+ - Cloudpickle: 3.0.0
+ - Gymnasium: 0.29.1
README.md ADDED
@@ -0,0 +1,37 @@
+ ---
+ library_name: stable-baselines3
+ tags:
+ - HalfCheetah-v4
+ - deep-reinforcement-learning
+ - reinforcement-learning
+ - stable-baselines3
+ model-index:
+ - name: DDPG
+ results:
+ - task:
+ type: reinforcement-learning
+ name: reinforcement-learning
+ dataset:
+ name: HalfCheetah-v4
+ type: HalfCheetah-v4
+ metrics:
+ - type: mean_reward
+ value: 9140.96 +/- 218.23
+ name: mean_reward
+ verified: false
+ ---
+
+ # **DDPG** Agent playing **HalfCheetah-v4**
+ This is a trained model of a **DDPG** agent playing **HalfCheetah-v4**
+ using the [stable-baselines3 library](https://github.com/DLR-RM/stable-baselines3).
+
+ ## Usage (with Stable-baselines3)
+ TODO: Add your code
+
+
+ ```python
+ from stable_baselines3 import ...
+ from huggingface_sb3 import load_from_hub
+
+ ...
+ ```
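
The usage section above is still the model-card template placeholder. A minimal loading and rollout sketch — assuming the Hub repo id Amankankriya/DDPG-HalfCheetah-v4 (inferred from the committer and file names, not stated in this commit) and a working MuJoCo install for HalfCheetah-v4 — might look like:

```python
# Hedged sketch: the repo_id below is an assumption, not confirmed by the commit.
import gymnasium as gym
from huggingface_sb3 import load_from_hub
from stable_baselines3 import DDPG

checkpoint = load_from_hub(
    repo_id="Amankankriya/DDPG-HalfCheetah-v4",  # assumed repo id
    filename="DDPG-HalfCheetah-v4.zip",
)
model = DDPG.load(checkpoint)

env = gym.make("HalfCheetah-v4")  # requires gymnasium[mujoco]
obs, _ = env.reset()
done = False
episode_return = 0.0
while not done:
    action, _ = model.predict(obs, deterministic=True)
    obs, reward, terminated, truncated, _ = env.step(action)
    episode_return += float(reward)
    done = terminated or truncated
print(f"episode return: {episode_return:.1f}")
```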
config.json ADDED
@@ -0,0 +1 @@
+ {"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVMAAAAAAAAACMHnN0YWJsZV9iYXNlbGluZXMzLnRkMy5wb2xpY2llc5SMCVREM1BvbGljeZSTlC4=", "__module__": "stable_baselines3.td3.policies", "__annotations__": "{'actor': <class 'stable_baselines3.td3.policies.Actor'>, 'actor_target': <class 'stable_baselines3.td3.policies.Actor'>, 'critic': <class 'stable_baselines3.common.policies.ContinuousCritic'>, 'critic_target': <class 'stable_baselines3.common.policies.ContinuousCritic'>}", "__doc__": "\n Policy class (with both actor and critic) for TD3.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n :param n_critics: Number of critic networks to create.\n :param share_features_extractor: Whether to share or not the features extractor\n between the actor and the critic (this saves computation time)\n ", "__init__": "<function TD3Policy.__init__ at 0x3181c6f70>", "_build": "<function TD3Policy._build at 0x3181d2040>", "_get_constructor_parameters": "<function TD3Policy._get_constructor_parameters at 0x3181d20d0>", "make_actor": "<function TD3Policy.make_actor at 0x3181d2160>", "make_critic": "<function TD3Policy.make_critic at 0x3181d21f0>", "forward": "<function TD3Policy.forward at 0x3181d2280>", "_predict": "<function TD3Policy._predict at 0x3181d2310>", "set_training_mode": "<function TD3Policy.set_training_mode at 0x3181d23a0>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x3181ceb00>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVcwAAAAAAAAB9lCiMDWFjdGl2YXRpb25fZm6UjBt0b3JjaC5ubi5tb2R1bGVzLmFjdGl2YXRpb26UjARSZUxVlJOUjAhuZXRfYXJjaJR9lCiMAnBplF2UKEuAS4BljAJxZpRdlChLgEuAZXWMCW5fY3JpdGljc5RLAXUu", "activation_fn": "<class 'torch.nn.modules.activation.ReLU'>", "net_arch": {"pi": [128, 128], "qf": [128, 128]}, "n_critics": 1}, "num_timesteps": 2395407, "_total_timesteps": 5000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1718010746217830000, "learning_rate": 0.00032, "tensorboard_log": null, "_last_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWV/QAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJaIAAAAAAAAAHih2mYmqrS/86IAZbZ7xz+dM073Ki26PxTyLbGc5OG/m3QE+K/31L80vwbXTajKPyMfRx2sWeU/t75ru+oU4j+K3q8Wy6giQGj/J3leDfI/b1w8ZikU4T/n6zwoEpczwK20YHYP+QPAMx4myfBLH8CJ9O0Z2+ogwOjsRcYcBjhA0FkoVgRZ/L+UjAVudW1weZSMBWR0eXBllJOUjAJmOJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiSwFLEYaUjAFDlHSUUpQu"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdAAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYBAAAAAAAAAAGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwGFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": 
"gAWV/QAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJaIAAAAAAAAAP9Crtkevr6/ETnZnk4duD+8FddlnvLpP1fZ6tONh9C/rqE/FApcvT+/oQxzgODkP/NM1Ow68+G/HMMxEjbKwD+AbhcxXisoQGWJywbcTuu/36X1Xv1p/j+HriW9D47/P6toFUmuES/A6sTxFD0HGsATx4iH4AHfv6xepTclCTNAE3KaC56UJkCUjAVudW1weZSMBWR0eXBllJOUjAJmOJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiSwFLEYaUjAFDlHSUUpQu"}, "_episode_num": 2395, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": 0.5209186, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVRAwAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQMGh3nw5NoKMAWyUTegDjAF0lEdAt5aum78Nx3V9lChoBkdAlNeN7fHgg2gHTegDaAhHQLeZBP9kz411fZQoaAZHQMIk7VdonKJoB03oA2gIR0C3m1NeQdS3dX2UKGgGR0DBykkiwB5paAdN6ANoCEdAt52q0MPSUnV9lChoBkdAwZ2gW6bvw2gHTegDaAhHQLegCMAFPi11fZQoaAZHQMGr8WUbDMxoB03oA2gIR0C3olotUXHjdX2UKGgGR0DCEWn/o7muaAdN6ANoCEdAt6SoRDkU9XV9lChoBkdAwrc0WmgrY2gHTegDaAhHQLem+vM8ox51fZQoaAZHQMLGm5Etuk1oB03oA2gIR0C3qUxWgezVdX2UKGgGR0DChyBSLqD9aAdN6ANoCEdAt6udMlC1JHV9lChoBkdAwYihMdLg42gHTegDaAhHQLet6ltCRfZ1fZQoaAZHQMMOP/W1+iJoB03oA2gIR0C3sDdfgJkYdX2UKGgGR0DCviTZOBUaaAdN6ANoCEdAt7KMovzvqnV9lChoBkdAwtKqnhsImmgHTegDaAhHQLe04ef7Jnx1fZQoaAZHQMJ4CV2JSBNoB03oA2gIR0C3tzFpXZGsdX2UKGgGR0DCPj5/EwWWaAdN6ANoCEdAt7mF45cTrXV9lChoBkdAwn+qYXO4X2gHTegDaAhHQLe72MY/FBJ1fZQoaAZHQMD5BS9VWCFoB03oA2gIR0C3vinmzSkTdX2UKGgGR0DCR3/UONHZaAdN6ANoCEdAt8CBHxz7uXV9lChoBkdAwoofE+gUUWgHTegDaAhHQLfC1pWV/tp1fZQoaAZHQMK/Vc4o7V9oB03oA2gIR0C3xSdjwx33dX2UKGgGR0DCCCTWK/EgaAdN6ANoCEdAt8eDq4YrKHV9lChoBkdAwkeXUFSsKmgHTegDaAhHQLfJ1+Idlup1fZQoaAZHQMJ9tYQSSNhoB03oA2gIR0C3zCz7uUlidX2UKGgGR0DCeZKSgXdkaAdN6ANoCEdAt85/tBv733V9lChoBkdAwn9EFHrhSGgHTegDaAhHQLfQ1edkJ8h1fZQoaAZHQMJpSsySFGpoB03oA2gIR0C30yqioKlYdX2UKGgGR0DCV6e87IT5aAdN6ANoCEdAt9V/IhhYvHV9lChoBkdAwkGBogV45mgHTegDaAhHQLfXz/d69kB1fZQoaAZHQMKH5ZGjKxNoB03oA2gIR0C32iAqiGnGdX2UKGgGR0DCG7xiXpnpaAdN6ANoCEdAt9xw+iaiK3V9lChoBkdAwrotOWSlnGgHTegDaAhHQLfewANXo1V1fZQoaAZHQMJh4aFM7EJoB03oA2gIR0C34RBASnLrdX2UKGgGR0DBokiZYxL1aAdN6ANoCEdAt+NgsiB5HHV9lChoBkdAwjGFohY/3WgHTegDaAhHQLflq4D9wWF1fZQoaAZHQMF1kvkili1oB03oA2gIR0C35/WCZnctdX2UKGgGR0DCHzrMA3kxaAdN6ANoCEdAt+pAymALA3V9lChoBkdAwjW/8JD3NGgHTegDaAhHQLfslufVZs91fZQoaAZHQMHtvDhcZ+BoB03oA2gIR0C37uO9rXUZdX2UKGgGR0DCZY8pobn6aAdN6ANoCEdAt/E8qqfe13V9lChoBkdAwhsSgq3EymgHTegDaAhHQLfzl9NN8E51fZQoaAZHQMIcOCQLeANoB03oA2gIR0C39e3pB5X2dX2UKGgGR0DCaK8nZ00WaAdN6ANoCEdAt/hAxfv4NHV9lChoBkdAwf57ERaouWgHTegDaAhHQLf6lH6uW8h1fZQoaAZHQMKHBVbzK9xoB03oA2gIR0C3/OuUyHmBdX2UKGgGR0DDHXC7kGRnaAdN6ANoCEdAt/8/gKnei3V9lChoBkdAwivX4YaYNWgHTegDaAhHQLgBkyJbdJt1fZQoaAZHQMH45Bw++uhoB03oA2gIR0C4A+fT5O8DdX2UKGgGR0DBdN7UExIraAdN6ANoCEdAuAZBHUc4pHV9lChoBkdAwkc9c1wYL2gHTegDaAhHQLgInjjJdSl1fZQoaAZHQMF7DQE6kqNoB03oA2gIR0C4CvL8ejmCdX2UKGgGR0DCAMp6Uqx1aAdN6ANoCEdAuA1I8p1A7nV9lChoBkdAwdNwUahpQGgHTegDaAhHQLgPnqDsdDJ1fZQoaAZHQMH3ntEw35xoB03oA2gIR0C4Efd+5OJtdX2UKGgGR0DCGu0xj8UFaAdN6ANoCEdAuBRN2t+1B3V9lChoBkdAwcGnC/GlymgHTegDaAhHQLgWputwJgN1fZQoaAZHQMHJELjxTbZoB03oA2gIR0C4GQBdpqREdX2UKGgGR0DCUBJ8neBQaAdN6ANoCEdAuBtZUZNwi3V9lChoBkdAwjvDEKE39GgHTegDaAhHQLgdsDuSfUZ1fZQoaAZHQMFRjanBLwpoB03oA2gIR0C4IAY6wMYudX2UKGgGR0DCxi5f2K2saAdN6ANoCEdAuCJaQYDT0HV9lChoBkdAwntz/zasZGgHTegDaAhHQLgks9zwMH91fZQoaAZHQMJbtotL+P1oB03oA2gIR0C4JwaIWP92dX2UKGgGR0DCOwfvSc9XaAdN6ANoCEdAuCldGpda+3V9lChoBkdAwtARa1TisGgHTegDaAhHQLgrtPBSDRN1fZQoaAZHQMHQtv0Zm7JoB03oA2gIR0C4LgpzHS4OdX2UKGgGR0DCjrsaKk2xaAdN6ANoCEdAuDBfSQYDT3V9lChoBkdAwqB4xyGSIWgHTegDaAhHQLgys1DjR2N1fZQoaAZHQMGwsoLG7z1oB03oA2gIR0C4NREdRzikdX2UKGgGR0Cm0eWTgVGkaAdN6ANoCEdAuDdrXtjTa3V9lChoBkdAwpwvTDwYtWgHTegDaAhHQ
Lg5vwA2hqV1fZQoaAZHQMLKDpnHvMNoB03oA2gIR0C4PBWw7kn1dX2UKGgGR0DB84yt/4IsaAdN6ANoCEdAuD5qp4rz5HV9lChoBkdAwnrj9qDbrWgHTegDaAhHQLhAyapgkTp1fZQoaAZHQMIuXSn+AEtoB03oA2gIR0C4QyF2icoZdX2UKGgGR0DCxFugctGvaAdN6ANoCEdAuEV6fWcz7HV9lChoBkdAwgQj0L+glGgHTegDaAhHQLhH08W9DhN1fZQoaAZHQMMpWhCD28JoB03oA2gIR0C4Si5XuE26dX2UKGgGR0DC9R3bXYlIaAdN6ANoCEdAuEyETviLl3V9lChoBkdAwtEbukUKzGgHTegDaAhHQLhO3OZ9d/t1fZQoaAZHQMLCMKYiPhhoB03oA2gIR0C4UTIChew+dX2UKGgGR0DCC1XYL9deaAdN6ANoCEdAuFOLLr5ZbXV9lChoBkdAwUGDst03fmgHTegDaAhHQLhV4Z8KG+N1fZQoaAZHQMIIVbAckt5oB03oA2gIR0C4WDnEyckMdX2UKGgGR0DCdBZnrY5DaAdN6ANoCEdAuFqPWy1NQHV9lChoBkdAwvbrQcghbGgHTegDaAhHQLhc+nHNorZ1fZQoaAZHQMIUALkS26VoB03oA2gIR0C4X1JmEoOQdX2UKGgGR0DCXO3vhIe6aAdN6ANoCEdAuGGqYQarFXV9lChoBkdAwkWfdPci4mgHTegDaAhHQLhkAlqJuVJ1fZQoaAZHQMF9Bt6PbPBoB03oA2gIR0C4ZmBib2DhdX2UKGgGR0DCrAj8ejmCaAdN6ANoCEdAuGi6zqrzXnV9lChoBkdAws3Ag4ffXWgHTegDaAhHQLhrF9JBgNR1fZQoaAZHQMHeJbyhBZ9oB03oA2gIR0C4bXVXq7iAdX2UKGgGR0DCPS9VcUudaAdN6ANoCEdAuG/VddE9dXV9lChoBkdAwhceS/0ulGgHTegDaAhHQLhyMruYx+N1fZQoaAZHQMIRdWrOqvNoB03oA2gIR0C4dKukHlfadX2UKGgGR0Ci6WiZF5OaaAdN6ANoCEdAuHcFJNCZ4XV9lChoBkdAwlZuAmzBymgHTegDaAhHQLh5YsMRYih1fZQoaAZHQMFd6xBu4w1oB03oA2gIR0C4h4BODaoNdX2UKGgGR0DCm83TEzfraAdN6ANoCEdAuInWKNyYHHVlLg=="}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 2395307, "observation_space": {":type:": "<class 'gymnasium.spaces.box.Box'>", ":serialized:": "gAWVsQIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY4lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWEQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJRoCIwCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksRhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoESiWEQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAJRoFUsRhZRoGXSUUpSMBl9zaGFwZZRLEYWUjANsb3eUaBEologAAAAAAAAAAAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/wAAAAAAAPD/AAAAAAAA8P8AAAAAAADw/5RoC0sRhZRoGXSUUpSMBGhpZ2iUaBEologAAAAAAAAAAAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwfwAAAAAAAPB/AAAAAAAA8H8AAAAAAADwf5RoC0sRhZRoGXSUUpSMCGxvd19yZXBylIwELWluZpSMCWhpZ2hfcmVwcpSMA2luZpSMCl9ucF9yYW5kb22UTnViLg==", "dtype": "float64", "bounded_below": "[False False False False False False False False False False False False\n False False False False False]", "bounded_above": "[False False False False False False False False False False False False\n False False False False False]", "_shape": [17], "low": "[-inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf\n -inf -inf -inf]", "high": "[inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf]", "low_repr": "-inf", "high_repr": "inf", "_np_random": null}, "action_space": {":type:": "<class 'gymnasium.spaces.box.Box'>", ":serialized:": 
"gAWVfwIAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWBgAAAAAAAAABAQEBAQGUaAiMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLBoWUjAFDlHSUUpSMDWJvdW5kZWRfYWJvdmWUaBEolgYAAAAAAAAAAQEBAQEBlGgVSwaFlGgZdJRSlIwGX3NoYXBllEsGhZSMA2xvd5RoESiWGAAAAAAAAAAAAIC/AACAvwAAgL8AAIC/AACAvwAAgL+UaAtLBoWUaBl0lFKUjARoaWdolGgRKJYYAAAAAAAAAAAAgD8AAIA/AACAPwAAgD8AAIA/AACAP5RoC0sGhZRoGXSUUpSMCGxvd19yZXBylIwELTEuMJSMCWhpZ2hfcmVwcpSMAzEuMJSMCl9ucF9yYW5kb22UjBRudW1weS5yYW5kb20uX3BpY2tsZZSMEF9fZ2VuZXJhdG9yX2N0b3KUk5SMBVBDRzY0lGgyjBRfX2JpdF9nZW5lcmF0b3JfY3RvcpSTlIaUUpR9lCiMDWJpdF9nZW5lcmF0b3KUjAVQQ0c2NJSMBXN0YXRllH2UKGg9ihA6z5GHrnLfBeG/QkZaiCUzjANpbmOUihGlluXzqdyrAUtuDLN5lNnYAHWMCmhhc191aW50MzKUSwCMCHVpbnRlZ2VylEsAdWJ1Yi4=", "dtype": "float32", "bounded_below": "[ True True True True True True]", "bounded_above": "[ True True True True True True]", "_shape": [6], "low": "[-1. -1. -1. -1. -1. -1.]", "high": "[1. 1. 1. 1. 1. 1.]", "low_repr": "-1.0", "high_repr": "1.0", "_np_random": "Generator(PCG64)"}, "n_envs": 1, "buffer_size": 1000000, "batch_size": 100, "learning_starts": 100, "tau": 0.05, "gamma": 0.99, "gradient_steps": 1, "optimize_memory_usage": false, "replay_buffer_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVNQAAAAAAAACMIHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5idWZmZXJzlIwMUmVwbGF5QnVmZmVylJOULg==", "__module__": "stable_baselines3.common.buffers", "__annotations__": "{'observations': <class 'numpy.ndarray'>, 'next_observations': <class 'numpy.ndarray'>, 'actions': <class 'numpy.ndarray'>, 'rewards': <class 'numpy.ndarray'>, 'dones': <class 'numpy.ndarray'>, 'timeouts': <class 'numpy.ndarray'>}", "__doc__": "\n Replay buffer used in off-policy algorithms like SAC/TD3.\n\n :param buffer_size: Max number of element in the buffer\n :param observation_space: Observation space\n :param action_space: Action space\n :param device: PyTorch device\n :param n_envs: Number of parallel environments\n :param optimize_memory_usage: Enable a memory efficient variant\n of the replay buffer which reduces by almost a factor two the memory used,\n at a cost of more complexity.\n See https://github.com/DLR-RM/stable-baselines3/issues/37#issuecomment-637501195\n and https://github.com/DLR-RM/stable-baselines3/pull/28#issuecomment-637559274\n Cannot be used in combination with handle_timeout_termination.\n :param handle_timeout_termination: Handle timeout termination (due to timelimit)\n separately and treat the task as infinite horizon task.\n https://github.com/DLR-RM/stable-baselines3/issues/284\n ", "__init__": "<function ReplayBuffer.__init__ at 0x318100a60>", "add": "<function ReplayBuffer.add at 0x318100af0>", "sample": "<function ReplayBuffer.sample at 0x318100b80>", "_get_samples": "<function ReplayBuffer._get_samples at 0x318100c10>", "_maybe_cast_dtype": "<staticmethod object at 0x3180f6a90>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x3180fee80>"}, "replay_buffer_kwargs": {}, "train_freq": {":type:": "<class 'stable_baselines3.common.type_aliases.TrainFreq'>", ":serialized:": "gAWVYQAAAAAAAACMJXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi50eXBlX2FsaWFzZXOUjAlUcmFpbkZyZXGUk5RLAWgAjBJUcmFpbkZyZXF1ZW5jeVVuaXSUk5SMBHN0ZXCUhZRSlIaUgZQu"}, "use_sde_at_warmup": false, "policy_delay": 1, "target_noise_clip": 0.0, "target_policy_noise": 0.1, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": 
"gAWVwwMAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLA0sTQwx0AIgAfACDAYMBUwCUToWUjAVmbG9hdJSFlIwScHJvZ3Jlc3NfcmVtYWluaW5nlIWUjGovVXNlcnMvYW1hbmthbmtyaXlhL0RvY3VtZW50cy9EZWVwIFJML2RlZXBybC9saWIvcHl0aG9uMy45L3NpdGUtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lIwIPGxhbWJkYT6US2FDAJSMDnZhbHVlX3NjaGVkdWxllIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMai9Vc2Vycy9hbWFua2Fua3JpeWEvRG9jdW1lbnRzL0RlZXAgUkwvZGVlcHJsL2xpYi9weXRob24zLjkvc2l0ZS1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUdU5OaACMEF9tYWtlX2VtcHR5X2NlbGyUk5QpUpSFlHSUUpRoAIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaCF9lH2UKGgYaA+MDF9fcXVhbG5hbWVfX5SMIWdldF9zY2hlZHVsZV9mbi48bG9jYWxzPi48bGFtYmRhPpSMD19fYW5ub3RhdGlvbnNfX5R9lIwOX19rd2RlZmF1bHRzX1+UTowMX19kZWZhdWx0c19flE6MCl9fbW9kdWxlX1+UaBmMB19fZG9jX1+UTowLX19jbG9zdXJlX1+UaACMCl9tYWtlX2NlbGyUk5RoAihoByhLAUsASwBLAUsBSxNDBIgAUwCUaAkpjAFflIWUaA6MBGZ1bmOUS4VDAgABlIwDdmFslIWUKXSUUpRoFU5OaB0pUpSFlHSUUpRoI2g9fZR9lChoGGg0aCaMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUaCh9lGgqTmgrTmgsaBloLU5oLmgwRz80+LWI42jxhZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjCFlFKUhZRoRV2UaEd9lHWGlIZSMC4="}, "actor_batch_norm_stats": [], "critic_batch_norm_stats": [], "actor_batch_norm_stats_target": [], "critic_batch_norm_stats_target": [], "system_info": {"OS": "macOS-14.5-arm64-arm-64bit Darwin Kernel Version 23.5.0: Wed May 1 20:19:05 PDT 2024; root:xnu-10063.121.3~5/RELEASE_ARM64_T8112", "Python": "3.9.6", "Stable-Baselines3": "2.3.2", "PyTorch": "2.3.0", "GPU Enabled": "False", "Numpy": "1.26.4", "Cloudpickle": "3.0.0", "Gymnasium": "0.29.1"}}
replay.mp4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1a1caaf06640ccf12c04b3077e96399ae07c4b685e690684cd26f4ec19390bd7
+ size 1418453
results.json ADDED
@@ -0,0 +1 @@
+ {"mean_reward": 9140.964570245333, "std_reward": 218.2285546836008, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2024-06-10T16:33:10.114156"}