Refined v3

- README.md +1 -1
- a2c-AntBulletEnv-v0.zip +2 -2
- a2c-AntBulletEnv-v0/data +23 -23
- a2c-AntBulletEnv-v0/policy.optimizer.pth +1 -1
- a2c-AntBulletEnv-v0/policy.pth +1 -1
- config.json +1 -1
- replay.mp4 +2 -2
- results.json +1 -1
- vec_normalize.pkl +1 -1
README.md
CHANGED
@@ -16,7 +16,7 @@ model-index:
       type: AntBulletEnv-v0
     metrics:
     - type: mean_reward
-      value:
+      value: 1098.73 +/- 195.97
       name: mean_reward
       verified: false
 ---
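
The updated card metric is simply the evaluation result from results.json (further down in this commit), rounded to two decimals. A minimal sketch of how that metric string can be reproduced from the committed file:

```python
import json

# Reproduce the model-card metric string from results.json (values from this commit).
with open("results.json") as f:
    results = json.load(f)

print(f"{results['mean_reward']:.2f} +/- {results['std_reward']:.2f}")
# -> 1098.73 +/- 195.97
```
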
a2c-AntBulletEnv-v0.zip
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:93047f08689f921b94a55f209be61c865345d977c1a1a73adfa2e2165efe3843
+size 129240
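
The binary files in this commit are stored as Git LFS pointers, so only the sha256 oid and the size change here. A minimal sketch (plain hashlib, no Hub-specific tooling) for checking that a locally downloaded a2c-AntBulletEnv-v0.zip matches the recorded oid:

```python
import hashlib

# Compare a downloaded artifact against the sha256 oid in its Git LFS pointer.
expected = "93047f08689f921b94a55f209be61c865345d977c1a1a73adfa2e2165efe3843"

sha = hashlib.sha256()
with open("a2c-AntBulletEnv-v0.zip", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

assert sha.hexdigest() == expected, "checksum mismatch"
```
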
a2c-AntBulletEnv-v0/data
CHANGED

Before:
@@ -4,20 +4,20 @@
 ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==",
 "__module__": "stable_baselines3.common.policies",
"__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
-"__init__": "<function ActorCriticPolicy.__init__ at
-"_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at
-"reset_noise": "<function ActorCriticPolicy.reset_noise at
-"_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at
-"_build": "<function ActorCriticPolicy._build at
-"forward": "<function ActorCriticPolicy.forward at
-"extract_features": "<function ActorCriticPolicy.extract_features at
-"_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at
-"_predict": "<function ActorCriticPolicy._predict at
-"evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at
-"get_distribution": "<function ActorCriticPolicy.get_distribution at
-"predict_values": "<function ActorCriticPolicy.predict_values at
 "__abstractmethods__": "frozenset()",
-"_abc_impl": "<_abc._abc_data object at
 },
 "verbose": 1,
 "policy_kwargs": {
@@ -32,12 +32,12 @@
 "weight_decay": 0
 }
 },
-"num_timesteps":
-"_total_timesteps":
 "_num_timesteps_at_start": 0,
 "seed": null,
 "action_noise": null,
-"start_time":
 "learning_rate": 0.001,
 "tensorboard_log": null,
 "lr_schedule": {
@@ -46,7 +46,7 @@
 },
 "_last_obs": {
 ":type:": "<class 'numpy.ndarray'>",
-":serialized:": "
 },
 "_last_episode_starts": {
 ":type:": "<class 'numpy.ndarray'>",
@@ -54,24 +54,24 @@
 },
 "_last_original_obs": {
 ":type:": "<class 'numpy.ndarray'>",
-":serialized:": "
 },
 "_episode_num": 0,
 "use_sde": true,
 "sde_sample_freq": -1,
-"_current_progress_remaining":
 "_stats_window_size": 100,
 "ep_info_buffer": {
 ":type:": "<class 'collections.deque'>",
-":serialized:": "
 },
 "ep_success_buffer": {
 ":type:": "<class 'collections.deque'>",
 ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
 },
-"_n_updates":
-"n_steps":
-"gamma": 0.
 "gae_lambda": 0.9,
 "ent_coef": 0.0,
 "vf_coef": 0.4,

After:
 ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==",
 "__module__": "stable_baselines3.common.policies",
"__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
+"__init__": "<function ActorCriticPolicy.__init__ at 0x7b79cceaea70>",
+"_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7b79cceaeb00>",
+"reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7b79cceaeb90>",
+"_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7b79cceaec20>",
+"_build": "<function ActorCriticPolicy._build at 0x7b79cceaecb0>",
+"forward": "<function ActorCriticPolicy.forward at 0x7b79cceaed40>",
+"extract_features": "<function ActorCriticPolicy.extract_features at 0x7b79cceaedd0>",
+"_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7b79cceaee60>",
+"_predict": "<function ActorCriticPolicy._predict at 0x7b79cceaeef0>",
+"evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7b79cceaef80>",
+"get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7b79cceaf010>",
+"predict_values": "<function ActorCriticPolicy.predict_values at 0x7b79cceaf0a0>",
 "__abstractmethods__": "frozenset()",
+"_abc_impl": "<_abc._abc_data object at 0x7b79d986fd80>"
 },
 "verbose": 1,
 "policy_kwargs": {
 "weight_decay": 0
 }
 },
+"num_timesteps": 2500000,
+"_total_timesteps": 2500000,
 "_num_timesteps_at_start": 0,
 "seed": null,
 "action_noise": null,
+"start_time": 1690168510310218753,
 "learning_rate": 0.001,
 "tensorboard_log": null,
 "lr_schedule": {
 },
 "_last_obs": {
 ":type:": "<class 'numpy.ndarray'>",
+
":serialized:": "gAWVNQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJbAAQAAAAAAAIXNib9ORIG/Rszlvoq8Wb/SsAo/M1pdPdRWSz9I7AJAdcTlPbMFFL9q4IW/uiADPdRrfT9jogfA1layv5l1uD+t0a8/skxav4BR0j4bm/a+aIvfPgx2i79s27e/i2EzPxkoET/PKqA+YSoYPzBvhz/X2+s+242av5+NZr9EMyC/+cnyvV1Tir//Ot2+DVyRPw49lz+W1Gk+X3c2v43GGb9u7ak/xYSPvDf0zr+XpTW/CF6uv9j4hD8/4QDA3ikMvrXiGr8z4RxALxOkP1ONob4ZKBE/zyqgPmEqGD+Q8nG/M8ABPlkHcD7jxio/fqfBvmMqbzwjuNE+fxv/vkGDPr66eRS/iBkJPwFDGD4phPM93WrLvk5iKT8ZGwU/eNHWPCy73T7CSCo9lE4qv3SlbD3kcbi/dTkQOcerqT8zfka9GSgRP88qoD5hKhg/kPJxvxIZGD/7crK/8R34v/FDhz7PXZG+n1guPxZIhr2HuXg/0rViP7Ucpz+uf5a+TFRSPN/c7j4tsxc/ptS5v5FyrD+yaEG+esQEwAGY/7+VwlI+Jjqqv3Nr7D5r5Ew/X8QDQBkoET/PKqA+YSoYPzBvhz+UjAVudW1weZSMBWR0eXBllJOUjAJmNJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiSwRLHIaUjAFDlHSUUpQu"
 },
 "_last_episode_starts": {
 ":type:": "<class 'numpy.ndarray'>",
 },
 "_last_original_obs": {
 ":type:": "<class 'numpy.ndarray'>",
+
":serialized:": "gAWVNQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJbAAQAAAAAAAAAAAABFkEu0AACAPwAAAAAAAAAAAAAAAAAAAAAAAACA7iTNvAAAAAAn0Pu/AAAAAEiQ970AAAAAO8b6PwAAAAB66RE8AAAAAOI7+z8AAAAAhW+YvQAAAAC8pOu/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5xfWNQAAgD8AAAAAAAAAAAAAAAAAAAAAAAAAgPXs7b0AAAAALmv4vwAAAADeHgc+AAAAAFr++D8AAAAAQnmBvQAAAADiRu4/AAAAABdn9z0AAAAA3pngvwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOF5EzIAAIA/AAAAAAAAAAAAAAAAAAAAAAAAAICKpF48AAAAAK8z778AAAAAOZFJvQAAAAC/QwFAAAAAAMjxhT0AAAAAwtLbPwAAAADqtcU6AAAAAFC94b8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACHelY2AACAPwAAAAAAAAAAAAAAAAAAAAAAAACAlqq/vQAAAACCiN2/AAAAACdJ0D0AAAAAffjwPwAAAACHGQ6+AAAAABcz7j8AAAAAaI+qvQAAAACqRv2/AAAAAAAAAAAAAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJmNJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiSwRLHIaUjAFDlHSUUpQu"
 },
 "_episode_num": 0,
 "use_sde": true,
 "sde_sample_freq": -1,
+"_current_progress_remaining": 0.0,
 "_stats_window_size": 100,
 "ep_info_buffer": {
 ":type:": "<class 'collections.deque'>",
+
":serialized:": "gAWVQQwAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQJAp0wztTk2MAWyUTegDjAF0lEdAsH6AFqzqr3V9lChoBkdAkoX90zTF2mgHTegDaAhHQLB/KMBIWgx1fZQoaAZHQJFgIi1RceNoB03oA2gIR0Cwg2+WjXWfdX2UKGgGR0CRVieUpuuSaAdN6ANoCEdAsINyTkhib3V9lChoBkdAiKhplz2ex2gHTegDaAhHQLCE/X3xnWd1fZQoaAZHQJNrYhib2DhoB03oA2gIR0CwhaeGXXyzdX2UKGgGR0CSAtgntv4uaAdN6ANoCEdAsIsDOB19v3V9lChoBkdAhu+mIsRQJ2gHTegDaAhHQLCLBfhddE91fZQoaAZHQIx1rFbVz6toB03oA2gIR0CwjIuRoysTdX2UKGgGR0CPLCf+0gKXaAdN6ANoCEdAsI0xAv+OwXV9lChoBkdAkI1X3cpLEmgHTegDaAhHQLCRW27Wd3B1fZQoaAZHQI9V4hhYvFpoB03oA2gIR0CwkV5KzzErdX2UKGgGR0CUqJk7OmiyaAdN6ANoCEdAsJLibhFVk3V9lChoBkdAkhbDBZZB9mgHTegDaAhHQLCThQEIPbx1fZQoaAZHQJNz92B8QZpoB03oA2gIR0CwmNOXE61cdX2UKGgGR0CTyDqxC6YmaAdN6ANoCEdAsJjWWGATZnV9lChoBkdAg0yOeSSvDGgHTegDaAhHQLCaYq6e5Fx1fZQoaAZHQJV/bxYq5LBoB03oA2gIR0CwmwXuZ1FIdX2UKGgGR0CR5ksySFGoaAdN6ANoCEdAsJ8/rgOz6nV9lChoBkdAlLoIrvsqrmgHTegDaAhHQLCfQn6VMVV1fZQoaAZHQJRk4QxvegtoB03oA2gIR0CwoM13hXKbdX2UKGgGR0CTCbw3YL9daAdN6ANoCEdAsKFzUTcqOXV9lChoBkdAjMgsPBi1A2gHTegDaAhHQLCm0NJe3QV1fZQoaAZHQJDCPmp2ll9oB03oA2gIR0CwptOSr5qNdX2UKGgGR0CLu2LRa5f/aAdN6ANoCEdAsKhXq1PWQXV9lChoBkdAipLNjkMkQmgHTegDaAhHQLCo9djG1hN1fZQoaAZHQIhkW0ojOcFoB03oA2gIR0CwrTJ97WupdX2UKGgGR0CFXCkCV8kVaAdN6ANoCEdAsK01WFN+LHV9lChoBkdAhTHCE6DGtWgHTegDaAhHQLCuu7Dl5nl1fZQoaAZHQIZQJllK9PFoB03oA2gIR0Cwr2BMSK3vdX2UKGgGR0CJYWdlum78aAdN6ANoCEdAsLS3mozeoHV9lChoBkdAh1A7yhBZ6mgHTegDaAhHQLC0uk8Rtgt1fZQoaAZHQIyYogNgBtFoB03oA2gIR0CwtkRxgiNbdX2UKGgGR0CPr0w6hg3MaAdN6ANoCEdAsLbpj6N2knV9lChoBkdAkgZXbmEGq2gHTegDaAhHQLC7HMqz7dl1fZQoaAZHQJIfaCuloDhoB03oA2gIR0Cwux+UliSadX2UKGgGR0CSPYeOGTLXaAdN6ANoCEdAsLyt3MY/FHV9lChoBkdAkk/Fp9JBgWgHTegDaAhHQLC9fWom5Ud1fZQoaAZHQJTGxftx+8ZoB03oA2gIR0CwwpzynUDudX2UKGgGR0CRsNYvFm4BaAdN6ANoCEdAsMKfrAxi5XV9lChoBkdAkxr84tHx0GgHTegDaAhHQLDEIIxQBPt1fZQoaAZHQJD5hv3rUspoB03oA2gIR0CwxMHBpHqedX2UKGgGR0CW9d2Jiy6daAdN6ANoCEdAsMj5Oj7AL3V9lChoBkdAkRYeAVfu1GgHTegDaAhHQLDI/BjnV5N1fZQoaAZHQJYPdr9ETg5oB03oA2gIR0Cwyo0KNQ0odX2UKGgGR0CO3//qgRK6aAdN6ANoCEdAsMt7DziCKHV9lChoBkdAkVBKxkd3jmgHTegDaAhHQLDQj80k4WF1fZQoaAZHQJVE49SuQp5oB03oA2gIR0Cw0JKoddVvdX2UKGgGR0A7JhmXgLqmaAdLemgIR0Cw0VU6PsAvdX2UKGgGR0CR8Xy1eBxxaAdN6ANoCEdAsNIbhsImgXV9lChoBkdAk/GfUrkKeGgHTegDaAhHQLDSw3rD6311fZQoaAZHQDxgmAskIHFoB0tqaAhHQLDSyrkKeCl1fZQoaAZHQIjhE8FINExoB03oA2gIR0Cw1wPBeokzdX2UKGgGR0CJkRrAxi5NaAdN6ANoCEdAsNfQXYUWVXV9lChoBkdAf+WFH8TBZmgHTegDaAhHQLDZub4agmJ1fZQoaAZHQIspLhLoOhFoB03oA2gIR0Cw2cL655JLdX2UKGgGR0A/yBguyu6maAdLZ2gIR0Cw2sz2alUIdX2UKGgGR0CQCFsAvL5iaAdN6ANoCEdAsN6ukyk9EHV9lChoBkdAj6PhbW3BpGgHTegDaAhHQLDfd08vEjx1fZQoaAZHQI5l07nxJ/ZoB03oA2gIR0Cw4OIYzi0fdX2UKGgGR0CI+4e5Fw1jaAdN6ANoCEdAsOGURUWEb3V9lChoBkdAkeHN4RmK7GgHTegDaAhHQLDlFUWl/H51fZQoaAZHQI8/vQD3dsVoB03oA2gIR0Cw5e+fmLccdX2UKGgGR0CTk539aUzLaAdN6ANoCEdAsOf1+NLlFXV9lChoBkdAkTDqYAsCk2gHTegDaAhHQLDpBokRjBl1fZQoaAZHQIjqJ+jM3ZRoB03oA2gIR0Cw7KgJkXk6dX2UKGgGR0CRUBBikO7QaAdN6ANoCEdAsO1qwjdHlXV9lChoBkdAkDYVQMx46mgHTegDaAhHQLDu2PXkHUt1fZQoaAZHQIt+rnmq5sloB03oA2gIR0Cw74paFEiMdX2UKGgGR0CJCYUX531SaAdN6ANoCEdAsPMSesgdO3V9lChoBkdAkK2jIq9XcWgHTegDaAhHQLD0I1ivxH51fZQoaAZHQJMfrjp9qlBoB03oA2gIR0Cw9jvKyOaOdX2UKGgGR0CNNhqpLmITaAdN6ANoCEdAsPcinfl6q3V9lChoBkdAi8G3Q2MsH2gHTegDaAhHQLD6xDZlFtt1fZQoaAZHQIunU/yGzrxoB03oA2gIR0Cw+42fGuLadX2UKGgGR0CJCrPoFFDwaAdN6ANoCEdAsPz9TaTOgXV9lChoBkdAhbA5rP+n62gHTegDaAhHQLD9sM2FWXF1fZQoaAZHQJDMu5TZQHloB03oA2gIR0CxAYgBtDUmdX2UKGgGR0CREYvQWvbHaAdN6ANoCEdAsQKkgeRxLnV9lChoBkdAiNBsfA9FF2gHTegDaAhHQLEEoIXj2jB1fZQoaAZHQIVkqW1MM7VoB03oA2gIR0CxBVTGLk0adX2UKGgGR0CPHu44Ia99aAdN6ANoCEdAsQjUqmTC+HV9lChoBkdAkkmG3WnTAmgHTegDaAhHQLEJk36AOKB1fZQoaAZHQJM0qbc45tFoB03oA2gIR0CxCvl3Y+SsdX2UKG
gGR0CQKTfjCHh1aAdN6ANoCEdAsQujVNHpbHV9lChoBkdAkRjokAxSHmgHTegDaAhHQLEPeT+ee4F1fZQoaAZHQJGzviXIEKVoB03oA2gIR0CxEJkdBBzFdX2UKGgGR0CTEs/IsAeaaAdN6ANoCEdAsRJn4FiazHV9lChoBkdAkbj0XDWK/GgHTegDaAhHQLETGimEXch1fZQoaAZHQJWk+ZLIxQBoB03oA2gIR0CxFqsWGh24dX2UKGgGR0CVK41ndweeaAdN6ANoCEdAsRd9irksBnV9lChoBkdAkirjcVQAMmgHTegDaAhHQLEY8F/QSjB1fZQoaAZHQIlGzWwu/URoB03oA2gIR0CxGZ/c8DB/dX2UKGgGR0CKLgzTF2mpaAdN6ANoCEdAsR3X1TR6W3V9lChoBkdAkKfAVKwpv2gHTegDaAhHQLEe/cLSeAd1fZQoaAZHQJBe5QIldC5oB03oA2gIR0CxIH9tIkJKdX2UKGgGR0CHixQbdadMaAdN6ANoCEdAsSFB8UmD2HV9lChoBkdAkmjvkili0GgHTegDaAhHQLEl4f/3nIR1fZQoaAZHQJPvAQPI4l1oB03oA2gIR0CxJqbkS26TdX2UKGgGR0CJihU8V58jaAdN6ANoCEdAsSgYVFhG6XVlLg=="
 },
 "ep_success_buffer": {
 ":type:": "<class 'collections.deque'>",
 ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
 },
+"_n_updates": 78125,
+"n_steps": 8,
+"gamma": 0.99,
 "gae_lambda": 0.9,
 "ent_coef": 0.0,
 "vf_coef": 0.4,
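
The ":serialized:" fields above are base64-encoded pickles that Stable-Baselines3 writes when it saves a model. A minimal sketch for decoding one of them, using the short ep_success_buffer value shown above (only unpickle archives you trust):

```python
import base64
import pickle

# Decode one ":serialized:" field from a2c-AntBulletEnv-v0/data.
# The value below is the short ep_success_buffer entry from this diff.
blob = "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
obj = pickle.loads(base64.b64decode(blob))
print(type(obj), len(obj), obj.maxlen)  # an empty collections.deque with maxlen=100
```
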
a2c-AntBulletEnv-v0/policy.optimizer.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:3ddb180fce8f561e03817be3c6b5c741ef110350dca54b0769e1fea0b6a125f9
 size 56190
a2c-AntBulletEnv-v0/policy.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:22c828b95e69b7bcab200216adad1cc964e9c85102ba70a2cd0a2ed8e863f56b
 size 56894
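
policy.pth and policy.optimizer.pth are ordinary PyTorch checkpoints (the policy state dict and its RMSprop optimizer state). A minimal sketch for listing the saved tensors without rebuilding the model, assuming the SB3 1.8.0 save layout used in this repo:

```python
import torch

# List the tensors stored in the policy checkpoint without instantiating the model.
state_dict = torch.load("a2c-AntBulletEnv-v0/policy.pth", map_location="cpu")
for name, tensor in state_dict.items():
    print(f"{name:45s} {tuple(tensor.shape)}")
```
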
config.json
CHANGED
@@ -1 +1 @@

Before:
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==", "__module__": "stable_baselines3.common.policies", "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function ActorCriticPolicy.__init__ at 0x7fcdea4e8670>", "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7fcdea4e8700>", "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7fcdea4e8790>", "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7fcdea4e8820>", "_build": "<function ActorCriticPolicy._build at 0x7fcdea4e88b0>", "forward": "<function ActorCriticPolicy.forward at 0x7fcdea4e8940>", "extract_features": "<function ActorCriticPolicy.extract_features at 0x7fcdea4e89d0>", "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7fcdea4e8a60>", "_predict": "<function ActorCriticPolicy._predict at 0x7fcdea4e8af0>", "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7fcdea4e8b80>", "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7fcdea4e8c10>", "predict_values": "<function ActorCriticPolicy.predict_values at 0x7fcdea4e8ca0>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7fcdea4ded80>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVowAAAAAAAAB9lCiMDGxvZ19zdGRfaW5pdJRK/v///4wKb3J0aG9faW5pdJSJjA9vcHRpbWl6ZXJfY2xhc3OUjBN0b3JjaC5vcHRpbS5ybXNwcm9wlIwHUk1TcHJvcJSTlIwQb3B0aW1pemVyX2t3YXJnc5R9lCiMBWFscGhhlEc/764UeuFHrowDZXBzlEc+5Pi1iONo8YwMd2VpZ2h0X2RlY2F5lEsAdXUu", "log_std_init": -2, "ortho_init": false, "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "num_timesteps": 2000000, "_total_timesteps": 2000000, 
"_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1690033410569728382, "learning_rate": 0.00096, "tensorboard_log": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuCQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz9PdRBNVR1phZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "_last_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVNQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJbAAQAAAAAAAMOJqL//f+a+Z46MPhfc6r6Q8bg9iOtPPt7h2D4ekd4+pQ5KPpdlJD79sx+/Rpo3PaDXgT4714K+TzuvPkQ6F71apIm/6rWLvQ1uVz88PSs+8W+Fv+nIar7bXD6/liAyPkXoID9AKac+GNnoPlPMbj+Ya6I/+owDQEzFI737sJk/iTngPvwZpb9/Hq8/MBBQv3N+pb/lQ0U+MwUWv8Abyb/69xo/BA5vQKxKnL/wobw/aJffPyCfCkBVeLk+DYsjv4szor/dFTZAAOgyP3W4mT8spcu/QCmnPhjZ6D5TzG4/Rh6zv9cq7z6D6i4/+LmRv47RAD8gQWs9MkhVPhVMHT7huKK/ta+sOlne0L4JR0m9BMa4Pa+cc73SEzg/57zhPIGUDj+JBGC8rKpBPzQ9jLzDvCq/it4RPAIoDb9ZDyG8ReggP0Appz4Y2eg+U8xuP8Nb3r7vPBG+WiH4PvQwH75hYJ0/ht2LPn9N4T1thE+/iCqIPgF/AcCV0ia+M6qyv4nJRL/jRII+SqM5P6+MiDuvAJa/s+dwPzPqqD+EqAy/tpw8vyOqv74YRdk+s25zvSyly79AKac+GNnoPlPMbj+UjAVudW1weZSMBWR0eXBllJOUjAJmNJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiSwRLHIaUjAFDlHSUUpQu"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVNQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJbAAQAAAAAAAAAAAAAwWL61AACAPwAAAAAAAAAAAAAAAAAAAAAAAACA2cLKPQAAAAA/GvC/AAAAAJ9BnD0AAAAAOaUAQAAAAADw/a09AAAAAO9y4z8AAAAAheL5PQAAAADI/eW/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAarnqtgAAgD8AAAAAAAAAAAAAAAAAAAAAAAAAgP4GpT0AAAAAqK3uvwAAAABO3Qe+AAAAAIcH8T8AAAAA28ClPAAAAAACo/c/AAAAACLk4T0AAAAA4MTcvwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABkdhzYAAIA/AAAAAAAAAAAAAAAAAAAAAAAAAIDQfd28AAAAABrf9L8AAAAAwvKiPQAAAACl4N4/AAAAAGeui7wAAAAAv7n3PwAAAABLIKq9AAAAACLy878AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADL5N20AACAPwAAAAAAAAAAAAAAAAAAAAAAAACA3vcpvQAAAACUz/m/AAAAAEsMKr0AAAAAJIDrPwAAAADTeu48AAAAALQ8/T8AAAAA8S83PQAAAABnR9m/AAAAAAAAAAAAAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJmNJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiSwRLHIaUjAFDlHSUUpQu"}, "_episode_num": 0, "use_sde": true, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": 
"gAWVRAwAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQJQ+8jt5UtKMAWyUTegDjAF0lEdAqojhoK2KEXV9lChoBkdAk1/VAzHjqGgHTegDaAhHQKqS6mqHXVd1fZQoaAZHQJb02/Ho5ghoB03oA2gIR0Cqk4u2iL2pdX2UKGgGR0CVv/fg75mAaAdN6ANoCEdAqpS2cx0uDnV9lChoBkdAlLfPfTCtR2gHTegDaAhHQKqYQVvddmh1fZQoaAZHQJWMLfl6qsFoB03oA2gIR0Cqn/GA08/2dX2UKGgGR0CTj6jopx3naAdN6ANoCEdAqqCL9n9NvnV9lChoBkdAlyOFl5GBnWgHTegDaAhHQKqhrr1uivh1fZQoaAZHQJMrqMfigkFoB03oA2gIR0CqpSeaBqbjdX2UKGgGR0CUl9Z4fOlgaAdN6ANoCEdAqq87IPsiS3V9lChoBkdAk8wjHn2ZiWgHTegDaAhHQKqv2T+NtIl1fZQoaAZHQJV00Gkep4toB03oA2gIR0CqsPohQm/ndX2UKGgGR0CQDfc580DVaAdN6ANoCEdAqrRxQSBbwHV9lChoBkdAlEOwUYbbUWgHTegDaAhHQKq8QfOD8Lt1fZQoaAZHQJZLm08eS0VoB03oA2gIR0CqvORWcSXddX2UKGgGR0CWQ+rOZ9eAaAdN6ANoCEdAqr4GWIGhVXV9lChoBkdAlSwZ6dDpkmgHTegDaAhHQKrBfeHBUJh1fZQoaAZHQJdDwEwFkhBoB03oA2gIR0Cqy2iB5HEudX2UKGgGR0CVX7m78Nx3aAdN6ANoCEdAqswF+iJwbXV9lChoBkdAl0iVkc0cfmgHTegDaAhHQKrNJCwbEP11fZQoaAZHQJHSYp/gBLhoB03oA2gIR0Cq0JbrcCYDdX2UKGgGR0CaB6RiPQv6aAdN6ANoCEdAqthQ93bEgnV9lChoBkdAlUL1PN3W4GgHTegDaAhHQKrY8jmCAc11fZQoaAZHQJkUQfV7QcBoB03oA2gIR0Cq2hX8fmtAdX2UKGgGR0CVqmm1IAfdaAdN6ANoCEdAqt39l9SdfHV9lChoBkdAmPPZAIIF/2gHTegDaAhHQKrneVHFxXJ1fZQoaAZHQJZx1Dc/MW5oB03oA2gIR0Cq6B68xsVMdX2UKGgGR0CSF0d8zAN5aAdN6ANoCEdAqulA3HaN/HV9lChoBkdAlFbLeqJdjWgHTegDaAhHQKrsqDp1RtR1fZQoaAZHQJZ5x+EytV9oB03oA2gIR0Cq9Gofr8iwdX2UKGgGR0CVRpI3zcynaAdN6ANoCEdAqvUJWDHwPXV9lChoBkdAlWVzBZZB9mgHTegDaAhHQKr2JtMPBi11fZQoaAZHQJMN3j6vaDhoB03oA2gIR0Cq+nfu1F6SdX2UKGgGR0CRbIxkNFz/aAdN6ANoCEdAqwOTDKoybnV9lChoBkdAkOpSVObiImgHTegDaAhHQKsELk1dgOV1fZQoaAZHQJDZhmRNh3JoB03oA2gIR0CrBVGwJPZadX2UKGgGR0CSaUV9nbqRaAdN6ANoCEdAqwi50bLlm3V9lChoBkdAjyTpD/lyR2gHTegDaAhHQKsQivHLidd1fZQoaAZHQITOKyQgcLloB03oA2gIR0CrETfnnuAqdX2UKGgGR0CIMOBpYcNpaAdN6ANoCEdAqxJh1Tzd13V9lChoBkdAgP3VU+9rXWgHTegDaAhHQKsXUE6kqMF1fZQoaAZHQHiijdLxqfxoB03oA2gIR0CrH+4zJp35dX2UKGgGR0ByULhisny/aAdN6ANoCEdAqyCPpB5X2nV9lChoBkdAgkNWAoXsPmgHTegDaAhHQKshu0iyIHl1fZQoaAZHQIU5FDhLoOhoB03oA2gIR0CrJRwSamXPdX2UKGgGR0B/p2rGR3eOaAdN6ANoCEdAqyzYjps41nV9lChoBkdAh77hAOavzWgHTegDaAhHQKstf0TURWd1fZQoaAZHQI/lqtq59VpoB03oA2gIR0CrLxBrvb48dX2UKGgGR0CNDTMV1wHaaAdN6ANoCEdAqzQxmPHT7XV9lChoBkdAkXTdNet0WGgHTegDaAhHQKs8GMpgCwN1fZQoaAZHQJA+pOM2m51oB03oA2gIR0CrPLdRaX8gdX2UKGgGR0CSuFggow23aAdN6ANoCEdAqz3O938n/nV9lChoBkdAkIl+x4Y772gHTegDaAhHQKtBRdadMCd1fZQoaAZHQJBqvr2QGOdoB03oA2gIR0CrSSAOz6acdX2UKGgGR0CAEgir1dxAaAdN6ANoCEdAq0oCi48U23V9lChoBkdAgWwa+36RAGgHTegDaAhHQKtLqGlhw2l1fZQoaAZHQI16CLVFx4poB03oA2gIR0CrULJsGgSOdX2UKGgGR0CCMeHk92X+aAdN6ANoCEdAq1h+0zCUHXV9lChoBkdAi369gv114mgHTegDaAhHQKtZIoOQQtl1fZQoaAZHQICTAm1IAfdoB03oA2gIR0CrWkYGD+R6dX2UKGgGR0CIwRObAk9maAdN6ANoCEdAq1261NQCS3V9lChoBkdAhq/KISDh+GgHTegDaAhHQKtmGXMQmNR1fZQoaAZHQIL7W2mYSg5oB03oA2gIR0CrZv6AvtdBdX2UKGgGR0CK6Lvl2eQNaAdN6ANoCEdAq2izCxeLN3V9lChoBkdAhFOPhybQTmgHTegDaAhHQKttDFId2gZ1fZQoaAZHQIDv4bbUPQRoB03oA2gIR0CrdNOg6EJ0dX2UKGgGR0CIFiT4+KTCaAdN6ANoCEdAq3VvWDpTuXV9lChoBkdAhDpiFK02L2gHTegDaAhHQKt2jCeEqUh1fZQoaAZHQJJwEFr2xptoB03oA2gIR0Cree3JPqLTdX2UKGgGR0CVQkXBP9DQaAdN6ANoCEdAq4KeKMvRJHV9lChoBkdAloUG7SRbKWgHTegDaAhHQKuDhCyhSLt1fZQoaAZHQJN7c+A3DN1oB03oA2gIR0CrhUaYVqN7dX2UKGgGR0CTQ/EjgQ6IaAdN6ANoCEdAq4kQA+6iCnV9lChoBkdAlwwoBV+7UWgHTegDaAhHQKuQsqur6tV1fZQoaAZHQIXK6Cg9NetoB03oA2gIR0CrkU/e+Eh8dX2UKGgGR0CVjbyMUAT7aAdN6ANoCEdAq5J5yU9py3V9lChoBkdAlHhdv863iWgHTegDaAhHQKuV6pS75Ed1fZQoaAZHQJa+HlKbrkdoB03oA2gIR0CrnvhCD28JdX2UKGgGR0CWZvNZeRgaaAdN6ANoCEdAq5/pRyfcvnV9lChoBkdAjYko1DSgG2gHTegDaAhHQKuhnH/cWTJ1fZQoaAZHQJUoWOp84PxoB03oA2gIR0CrpR0mlZX/dX2UKGgGR0CV70M2FWXDaAdN6ANoCEdAq6zWm51/2HV9lChoBkdAlJVO3trsSmgHTegDaAhHQKutdsZYPoV1fZQoaAZHQJSVWANG3F1oB03oA2gIR0CrroveYUnHdX2UKGgGR0CVDH4usc
QzaAdN6ANoCEdAq7HgHVwxWXV9lChoBkdAksWtdeIEbGgHTegDaAhHQKu7itvGZNR1fZQoaAZHQJCVyTaCcwxoB03oA2gIR0CrvHvbwjMWdX2UKGgGR0CV47OryUcGaAdN6ANoCEdAq72hvxYq5XV9lChoBkdAlBQitA9mpWgHTegDaAhHQKvBF2gWac91fZQoaAZHQIRY7qhUR4BoB03oA2gIR0CryOilSCOFdX2UKGgGR0CMmxRdhRZVaAdN6ANoCEdAq8mL0L+glHV9lChoBkdAjQSOB19v0mgHTegDaAhHQKvKrJkGzKN1fZQoaAZHQIrzmoBJZntoB03oA2gIR0CrzhKBNEgGdX2UKGgGR0CRC3naFmFraAdN6ANoCEdAq9llo11nunV9lChoBkdAhVmUkfLcK2gHTegDaAhHQKvaRD3ueBh1fZQoaAZHQJF4YUsWfshoB03oA2gIR0Cr2/fkvK2bdX2UKGgGR0CN7hiS7oStaAdN6ANoCEdAq9/bf51vEXV9lChoBkdAlm9O1rqMWGgHTegDaAhHQKvnhkWAPNF1fZQoaAZHQJVC+ys0YTFoB03oA2gIR0Cr6CaRp1zRdX2UKGgGR0CEQTU+cH4XaAdN6ANoCEdAq+lR2OhkAnVlLg=="}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 62500, "n_steps": 8, "gamma": 0.99, "gae_lambda": 0.9, "ent_coef": 0.0, "vf_coef": 0.4, "max_grad_norm": 0.5, "normalize_advantage": false, "observation_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": "gAWVbQIAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLHIWUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWcAAAAAAAAAAAAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/lGgLSxyFlIwBQ5R0lFKUjARoaWdolGgTKJZwAAAAAAAAAAAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH+UaAtLHIWUaBZ0lFKUjA1ib3VuZGVkX2JlbG93lGgTKJYcAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUaAiMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLHIWUaBZ0lFKUjA1ib3VuZGVkX2Fib3ZllGgTKJYcAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUaCJLHIWUaBZ0lFKUjApfbnBfcmFuZG9tlE51Yi4=", "dtype": "float32", "_shape": [28], "low": "[-inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf\n -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf]", "high": "[inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf\n inf inf inf inf inf inf inf inf inf inf]", "bounded_below": "[False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False False]", "bounded_above": "[False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False False]", "_np_random": null}, "action_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": "gAWVpQEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLCIWUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWIAAAAAAAAAAAAIC/AACAvwAAgL8AAIC/AACAvwAAgL8AAIC/AACAv5RoC0sIhZSMAUOUdJRSlIwEaGlnaJRoEyiWIAAAAAAAAAAAAIA/AACAPwAAgD8AAIA/AACAPwAAgD8AAIA/AACAP5RoC0sIhZRoFnSUUpSMDWJvdW5kZWRfYmVsb3eUaBMolggAAAAAAAAAAQEBAQEBAQGUaAiMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLCIWUaBZ0lFKUjA1ib3VuZGVkX2Fib3ZllGgTKJYIAAAAAAAAAAEBAQEBAQEBlGgiSwiFlGgWdJRSlIwKX25wX3JhbmRvbZROdWIu", "dtype": "float32", "_shape": [8], "low": "[-1. -1. -1. -1. -1. -1. -1. -1.]", "high": "[1. 1. 1. 1. 1. 1. 1. 
1.]", "bounded_below": "[ True True True True True True True True]", "bounded_above": "[ True True True True True True True True]", "_np_random": null}, "n_envs": 4, "system_info": {"OS": "Linux-5.15.109+-x86_64-with-glibc2.35 # 1 SMP Fri Jun 9 10:57:30 UTC 2023", "Python": "3.10.6", "Stable-Baselines3": "1.8.0", "PyTorch": "2.0.1+cu118", "GPU Enabled": "True", "Numpy": "1.22.4", "Gym": "0.21.0"}}

After:
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVOwAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMEUFjdG9yQ3JpdGljUG9saWN5lJOULg==", "__module__": "stable_baselines3.common.policies", "__doc__": "\n Policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Features extractor to use.\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function ActorCriticPolicy.__init__ at 0x7b79cceaea70>", "_get_constructor_parameters": "<function ActorCriticPolicy._get_constructor_parameters at 0x7b79cceaeb00>", "reset_noise": "<function ActorCriticPolicy.reset_noise at 0x7b79cceaeb90>", "_build_mlp_extractor": "<function ActorCriticPolicy._build_mlp_extractor at 0x7b79cceaec20>", "_build": "<function ActorCriticPolicy._build at 0x7b79cceaecb0>", "forward": "<function ActorCriticPolicy.forward at 0x7b79cceaed40>", "extract_features": "<function ActorCriticPolicy.extract_features at 0x7b79cceaedd0>", "_get_action_dist_from_latent": "<function ActorCriticPolicy._get_action_dist_from_latent at 0x7b79cceaee60>", "_predict": "<function ActorCriticPolicy._predict at 0x7b79cceaeef0>", "evaluate_actions": "<function ActorCriticPolicy.evaluate_actions at 0x7b79cceaef80>", "get_distribution": "<function ActorCriticPolicy.get_distribution at 0x7b79cceaf010>", "predict_values": "<function ActorCriticPolicy.predict_values at 0x7b79cceaf0a0>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7b79d986fd80>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVowAAAAAAAAB9lCiMDGxvZ19zdGRfaW5pdJRK/v///4wKb3J0aG9faW5pdJSJjA9vcHRpbWl6ZXJfY2xhc3OUjBN0b3JjaC5vcHRpbS5ybXNwcm9wlIwHUk1TcHJvcJSTlIwQb3B0aW1pemVyX2t3YXJnc5R9lCiMBWFscGhhlEc/764UeuFHrowDZXBzlEc+5Pi1iONo8YwMd2VpZ2h0X2RlY2F5lEsAdXUu", "log_std_init": -2, "ortho_init": false, "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "num_timesteps": 2500000, "_total_timesteps": 2500000, 
"_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1690168510310218753, "learning_rate": 0.001, "tensorboard_log": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVxQIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSS91c3IvbG9jYWwvbGliL3B5dGhvbjMuMTAvZGlzdC1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuCQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjEkvdXNyL2xvY2FsL2xpYi9weXRob24zLjEwL2Rpc3QtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz9QYk3S8an8hZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "_last_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVNQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJbAAQAAAAAAAIXNib9ORIG/Rszlvoq8Wb/SsAo/M1pdPdRWSz9I7AJAdcTlPbMFFL9q4IW/uiADPdRrfT9jogfA1layv5l1uD+t0a8/skxav4BR0j4bm/a+aIvfPgx2i79s27e/i2EzPxkoET/PKqA+YSoYPzBvhz/X2+s+242av5+NZr9EMyC/+cnyvV1Tir//Ot2+DVyRPw49lz+W1Gk+X3c2v43GGb9u7ak/xYSPvDf0zr+XpTW/CF6uv9j4hD8/4QDA3ikMvrXiGr8z4RxALxOkP1ONob4ZKBE/zyqgPmEqGD+Q8nG/M8ABPlkHcD7jxio/fqfBvmMqbzwjuNE+fxv/vkGDPr66eRS/iBkJPwFDGD4phPM93WrLvk5iKT8ZGwU/eNHWPCy73T7CSCo9lE4qv3SlbD3kcbi/dTkQOcerqT8zfka9GSgRP88qoD5hKhg/kPJxvxIZGD/7crK/8R34v/FDhz7PXZG+n1guPxZIhr2HuXg/0rViP7Ucpz+uf5a+TFRSPN/c7j4tsxc/ptS5v5FyrD+yaEG+esQEwAGY/7+VwlI+Jjqqv3Nr7D5r5Ew/X8QDQBkoET/PKqA+YSoYPzBvhz+UjAVudW1weZSMBWR0eXBllJOUjAJmNJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiSwRLHIaUjAFDlHSUUpQu"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVNQIAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJbAAQAAAAAAAAAAAABFkEu0AACAPwAAAAAAAAAAAAAAAAAAAAAAAACA7iTNvAAAAAAn0Pu/AAAAAEiQ970AAAAAO8b6PwAAAAB66RE8AAAAAOI7+z8AAAAAhW+YvQAAAAC8pOu/AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA5xfWNQAAgD8AAAAAAAAAAAAAAAAAAAAAAAAAgPXs7b0AAAAALmv4vwAAAADeHgc+AAAAAFr++D8AAAAAQnmBvQAAAADiRu4/AAAAABdn9z0AAAAA3pngvwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAOF5EzIAAIA/AAAAAAAAAAAAAAAAAAAAAAAAAICKpF48AAAAAK8z778AAAAAOZFJvQAAAAC/QwFAAAAAAMjxhT0AAAAAwtLbPwAAAADqtcU6AAAAAFC94b8AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACHelY2AACAPwAAAAAAAAAAAAAAAAAAAAAAAACAlqq/vQAAAACCiN2/AAAAACdJ0D0AAAAAffjwPwAAAACHGQ6+AAAAABcz7j8AAAAAaI+qvQAAAACqRv2/AAAAAAAAAAAAAAAAAAAAAAAAAACUjAVudW1weZSMBWR0eXBllJOUjAJmNJSJiIeUUpQoSwOMATyUTk5OSv////9K/////0sAdJRiSwRLHIaUjAFDlHSUUpQu"}, "_episode_num": 0, "use_sde": true, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": 
"gAWVQQwAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHQJAp0wztTk2MAWyUTegDjAF0lEdAsH6AFqzqr3V9lChoBkdAkoX90zTF2mgHTegDaAhHQLB/KMBIWgx1fZQoaAZHQJFgIi1RceNoB03oA2gIR0Cwg2+WjXWfdX2UKGgGR0CRVieUpuuSaAdN6ANoCEdAsINyTkhib3V9lChoBkdAiKhplz2ex2gHTegDaAhHQLCE/X3xnWd1fZQoaAZHQJNrYhib2DhoB03oA2gIR0CwhaeGXXyzdX2UKGgGR0CSAtgntv4uaAdN6ANoCEdAsIsDOB19v3V9lChoBkdAhu+mIsRQJ2gHTegDaAhHQLCLBfhddE91fZQoaAZHQIx1rFbVz6toB03oA2gIR0CwjIuRoysTdX2UKGgGR0CPLCf+0gKXaAdN6ANoCEdAsI0xAv+OwXV9lChoBkdAkI1X3cpLEmgHTegDaAhHQLCRW27Wd3B1fZQoaAZHQI9V4hhYvFpoB03oA2gIR0CwkV5KzzErdX2UKGgGR0CUqJk7OmiyaAdN6ANoCEdAsJLibhFVk3V9lChoBkdAkhbDBZZB9mgHTegDaAhHQLCThQEIPbx1fZQoaAZHQJNz92B8QZpoB03oA2gIR0CwmNOXE61cdX2UKGgGR0CTyDqxC6YmaAdN6ANoCEdAsJjWWGATZnV9lChoBkdAg0yOeSSvDGgHTegDaAhHQLCaYq6e5Fx1fZQoaAZHQJV/bxYq5LBoB03oA2gIR0CwmwXuZ1FIdX2UKGgGR0CR5ksySFGoaAdN6ANoCEdAsJ8/rgOz6nV9lChoBkdAlLoIrvsqrmgHTegDaAhHQLCfQn6VMVV1fZQoaAZHQJRk4QxvegtoB03oA2gIR0CwoM13hXKbdX2UKGgGR0CTCbw3YL9daAdN6ANoCEdAsKFzUTcqOXV9lChoBkdAjMgsPBi1A2gHTegDaAhHQLCm0NJe3QV1fZQoaAZHQJDCPmp2ll9oB03oA2gIR0CwptOSr5qNdX2UKGgGR0CLu2LRa5f/aAdN6ANoCEdAsKhXq1PWQXV9lChoBkdAipLNjkMkQmgHTegDaAhHQLCo9djG1hN1fZQoaAZHQIhkW0ojOcFoB03oA2gIR0CwrTJ97WupdX2UKGgGR0CFXCkCV8kVaAdN6ANoCEdAsK01WFN+LHV9lChoBkdAhTHCE6DGtWgHTegDaAhHQLCuu7Dl5nl1fZQoaAZHQIZQJllK9PFoB03oA2gIR0Cwr2BMSK3vdX2UKGgGR0CJYWdlum78aAdN6ANoCEdAsLS3mozeoHV9lChoBkdAh1A7yhBZ6mgHTegDaAhHQLC0uk8Rtgt1fZQoaAZHQIyYogNgBtFoB03oA2gIR0CwtkRxgiNbdX2UKGgGR0CPr0w6hg3MaAdN6ANoCEdAsLbpj6N2knV9lChoBkdAkgZXbmEGq2gHTegDaAhHQLC7HMqz7dl1fZQoaAZHQJIfaCuloDhoB03oA2gIR0Cwux+UliSadX2UKGgGR0CSPYeOGTLXaAdN6ANoCEdAsLyt3MY/FHV9lChoBkdAkk/Fp9JBgWgHTegDaAhHQLC9fWom5Ud1fZQoaAZHQJTGxftx+8ZoB03oA2gIR0CwwpzynUDudX2UKGgGR0CRsNYvFm4BaAdN6ANoCEdAsMKfrAxi5XV9lChoBkdAkxr84tHx0GgHTegDaAhHQLDEIIxQBPt1fZQoaAZHQJD5hv3rUspoB03oA2gIR0CwxMHBpHqedX2UKGgGR0CW9d2Jiy6daAdN6ANoCEdAsMj5Oj7AL3V9lChoBkdAkRYeAVfu1GgHTegDaAhHQLDI/BjnV5N1fZQoaAZHQJYPdr9ETg5oB03oA2gIR0Cwyo0KNQ0odX2UKGgGR0CO3//qgRK6aAdN6ANoCEdAsMt7DziCKHV9lChoBkdAkVBKxkd3jmgHTegDaAhHQLDQj80k4WF1fZQoaAZHQJVE49SuQp5oB03oA2gIR0Cw0JKoddVvdX2UKGgGR0A7JhmXgLqmaAdLemgIR0Cw0VU6PsAvdX2UKGgGR0CR8Xy1eBxxaAdN6ANoCEdAsNIbhsImgXV9lChoBkdAk/GfUrkKeGgHTegDaAhHQLDSw3rD6311fZQoaAZHQDxgmAskIHFoB0tqaAhHQLDSyrkKeCl1fZQoaAZHQIjhE8FINExoB03oA2gIR0Cw1wPBeokzdX2UKGgGR0CJkRrAxi5NaAdN6ANoCEdAsNfQXYUWVXV9lChoBkdAf+WFH8TBZmgHTegDaAhHQLDZub4agmJ1fZQoaAZHQIspLhLoOhFoB03oA2gIR0Cw2cL655JLdX2UKGgGR0A/yBguyu6maAdLZ2gIR0Cw2sz2alUIdX2UKGgGR0CQCFsAvL5iaAdN6ANoCEdAsN6ukyk9EHV9lChoBkdAj6PhbW3BpGgHTegDaAhHQLDfd08vEjx1fZQoaAZHQI5l07nxJ/ZoB03oA2gIR0Cw4OIYzi0fdX2UKGgGR0CI+4e5Fw1jaAdN6ANoCEdAsOGURUWEb3V9lChoBkdAkeHN4RmK7GgHTegDaAhHQLDlFUWl/H51fZQoaAZHQI8/vQD3dsVoB03oA2gIR0Cw5e+fmLccdX2UKGgGR0CTk539aUzLaAdN6ANoCEdAsOf1+NLlFXV9lChoBkdAkTDqYAsCk2gHTegDaAhHQLDpBokRjBl1fZQoaAZHQIjqJ+jM3ZRoB03oA2gIR0Cw7KgJkXk6dX2UKGgGR0CRUBBikO7QaAdN6ANoCEdAsO1qwjdHlXV9lChoBkdAkDYVQMx46mgHTegDaAhHQLDu2PXkHUt1fZQoaAZHQIt+rnmq5sloB03oA2gIR0Cw74paFEiMdX2UKGgGR0CJCYUX531SaAdN6ANoCEdAsPMSesgdO3V9lChoBkdAkK2jIq9XcWgHTegDaAhHQLD0I1ivxH51fZQoaAZHQJMfrjp9qlBoB03oA2gIR0Cw9jvKyOaOdX2UKGgGR0CNNhqpLmITaAdN6ANoCEdAsPcinfl6q3V9lChoBkdAi8G3Q2MsH2gHTegDaAhHQLD6xDZlFtt1fZQoaAZHQIunU/yGzrxoB03oA2gIR0Cw+42fGuLadX2UKGgGR0CJCrPoFFDwaAdN6ANoCEdAsPz9TaTOgXV9lChoBkdAhbA5rP+n62gHTegDaAhHQLD9sM2FWXF1fZQoaAZHQJDMu5TZQHloB03oA2gIR0CxAYgBtDUmdX2UKGgGR0CREYvQWvbHaAdN6ANoCEdAsQKkgeRxLnV9lChoBkdAiNBsfA9FF2gHTegDaAhHQLEEoIXj2jB1fZQoaAZHQIVkqW1MM7VoB03oA2gIR0CxBVTGLk0adX2UKGgGR0CPHu44Ia99aAdN6ANoCEdAsQjUqmTC+HV9lChoBkdAkkmG3WnTAmgHTegDaAhHQLEJk36AOKB1fZQoaAZHQJM0qbc45tFoB03oA2gIR0CxCvl3Y+SsdX2UKGgGR0CQKTfjCHh1aA
dN6ANoCEdAsQujVNHpbHV9lChoBkdAkRjokAxSHmgHTegDaAhHQLEPeT+ee4F1fZQoaAZHQJGzviXIEKVoB03oA2gIR0CxEJkdBBzFdX2UKGgGR0CTEs/IsAeaaAdN6ANoCEdAsRJn4FiazHV9lChoBkdAkbj0XDWK/GgHTegDaAhHQLETGimEXch1fZQoaAZHQJWk+ZLIxQBoB03oA2gIR0CxFqsWGh24dX2UKGgGR0CVK41ndweeaAdN6ANoCEdAsRd9irksBnV9lChoBkdAkirjcVQAMmgHTegDaAhHQLEY8F/QSjB1fZQoaAZHQIlGzWwu/URoB03oA2gIR0CxGZ/c8DB/dX2UKGgGR0CKLgzTF2mpaAdN6ANoCEdAsR3X1TR6W3V9lChoBkdAkKfAVKwpv2gHTegDaAhHQLEe/cLSeAd1fZQoaAZHQJBe5QIldC5oB03oA2gIR0CxIH9tIkJKdX2UKGgGR0CHixQbdadMaAdN6ANoCEdAsSFB8UmD2HV9lChoBkdAkmjvkili0GgHTegDaAhHQLEl4f/3nIR1fZQoaAZHQJPvAQPI4l1oB03oA2gIR0CxJqbkS26TdX2UKGgGR0CJihU8V58jaAdN6ANoCEdAsSgYVFhG6XVlLg=="}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 78125, "n_steps": 8, "gamma": 0.99, "gae_lambda": 0.9, "ent_coef": 0.0, "vf_coef": 0.4, "max_grad_norm": 0.5, "normalize_advantage": false, "observation_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": "gAWVbQIAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLHIWUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWcAAAAAAAAAAAAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/AACA/wAAgP8AAID/lGgLSxyFlIwBQ5R0lFKUjARoaWdolGgTKJZwAAAAAAAAAAAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH8AAIB/AACAfwAAgH+UaAtLHIWUaBZ0lFKUjA1ib3VuZGVkX2JlbG93lGgTKJYcAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUaAiMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLHIWUaBZ0lFKUjA1ib3VuZGVkX2Fib3ZllGgTKJYcAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAACUaCJLHIWUaBZ0lFKUjApfbnBfcmFuZG9tlE51Yi4=", "dtype": "float32", "_shape": [28], "low": "[-inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf\n -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf -inf]", "high": "[inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf inf\n inf inf inf inf inf inf inf inf inf inf]", "bounded_below": "[False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False False]", "bounded_above": "[False False False False False False False False False False False False\n False False False False False False False False False False False False\n False False False False]", "_np_random": null}, "action_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": "gAWVpQEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLCIWUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWIAAAAAAAAAAAAIC/AACAvwAAgL8AAIC/AACAvwAAgL8AAIC/AACAv5RoC0sIhZSMAUOUdJRSlIwEaGlnaJRoEyiWIAAAAAAAAAAAAIA/AACAPwAAgD8AAIA/AACAPwAAgD8AAIA/AACAP5RoC0sIhZRoFnSUUpSMDWJvdW5kZWRfYmVsb3eUaBMolggAAAAAAAAAAQEBAQEBAQGUaAiMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLCIWUaBZ0lFKUjA1ib3VuZGVkX2Fib3ZllGgTKJYIAAAAAAAAAAEBAQEBAQEBlGgiSwiFlGgWdJRSlIwKX25wX3JhbmRvbZROdWIu", "dtype": "float32", "_shape": [8], "low": "[-1. -1. -1. -1. -1. -1. -1. -1.]", "high": "[1. 1. 1. 1. 1. 1. 1. 
1.]", "bounded_below": "[ True True True True True True True True]", "bounded_above": "[ True True True True True True True True]", "_np_random": null}, "n_envs": 4, "system_info": {"OS": "Linux-5.15.109+-x86_64-with-glibc2.35 # 1 SMP Fri Jun 9 10:57:30 UTC 2023", "Python": "3.10.6", "Stable-Baselines3": "1.8.0", "PyTorch": "2.0.1+cu118", "GPU Enabled": "True", "Numpy": "1.22.4", "Gym": "0.21.0"}}
replay.mp4
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:5396c6c4ba8b90aeb73686499a1bd3fed2c11bc02b285e8a013473a71cbb99a4
+size 1122566
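
replay.mp4 is the rendered evaluation rollout. The recording script is not part of the commit; one way to regenerate a similar clip is SB3's VecVideoRecorder, sketched below under the assumption that the repo files are available locally and the environment can render rgb_array frames:

```python
import pybullet_envs  # noqa: F401
from stable_baselines3 import A2C
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.vec_env import VecNormalize, VecVideoRecorder

# Record a short deterministic rollout to videos/ (hypothetical re-recording, not the original script).
eval_env = VecNormalize.load("vec_normalize.pkl", make_vec_env("AntBulletEnv-v0", n_envs=1))
eval_env.training = False
eval_env.norm_reward = False
eval_env = VecVideoRecorder(
    eval_env,
    video_folder="videos/",
    record_video_trigger=lambda step: step == 0,
    video_length=1000,
    name_prefix="a2c-AntBulletEnv-v0",
)

model = A2C.load("a2c-AntBulletEnv-v0.zip")
obs = eval_env.reset()
for _ in range(1000):
    action, _ = model.predict(obs, deterministic=True)
    obs, _, _, _ = eval_env.step(action)
eval_env.close()
```
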
results.json
CHANGED
@@ -1 +1 @@
-{"mean_reward":
+{"mean_reward": 1098.7294492264045, "std_reward": 195.9682372369304, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2023-07-24T05:03:21.179783"}
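
The new results.json corresponds to a standard SB3 evaluation over 10 deterministic episodes. A minimal sketch of how such a file can be produced from the committed artifacts (paths assumed to be local copies of this repo):

```python
import json
from datetime import datetime

import pybullet_envs  # noqa: F401
from stable_baselines3 import A2C
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.evaluation import evaluate_policy
from stable_baselines3.common.vec_env import VecNormalize

# Rebuild the evaluation setup from the committed artifacts, then evaluate 10 deterministic episodes.
eval_env = VecNormalize.load("vec_normalize.pkl", make_vec_env("AntBulletEnv-v0", n_envs=1))
eval_env.training = False
eval_env.norm_reward = False
model = A2C.load("a2c-AntBulletEnv-v0.zip", env=eval_env)

mean_reward, std_reward = evaluate_policy(model, eval_env, n_eval_episodes=10, deterministic=True)

with open("results.json", "w") as f:
    json.dump(
        {
            "mean_reward": float(mean_reward),
            "std_reward": float(std_reward),
            "is_deterministic": True,
            "n_eval_episodes": 10,
            "eval_datetime": datetime.now().isoformat(),
        },
        f,
    )
```
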
vec_normalize.pkl
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:642ed2017127c1f1b8896f9cd99115796950239551ac492a7e78222dfbd907e5
 size 2176
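
vec_normalize.pkl stores the running observation/return statistics from training; they must be reloaded and frozen at evaluation time so the policy sees observations on the scale it was trained with. A minimal sketch for loading and inspecting them:

```python
import pybullet_envs  # noqa: F401
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.vec_env import VecNormalize

# Load the saved normalization statistics and freeze them for evaluation.
venv = VecNormalize.load("vec_normalize.pkl", make_vec_env("AntBulletEnv-v0", n_envs=1))
venv.training = False      # do not update the running statistics
venv.norm_reward = False   # report raw rewards at evaluation time

print("obs mean (first 5 dims):", venv.obs_rms.mean[:5])
print("obs var  (first 5 dims):", venv.obs_rms.var[:5])
```
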