Initial commit

- README.md +1 -1
- a2c-PandaReachDense-v2.zip +2 -2
- a2c-PandaReachDense-v2/data +20 -18
- a2c-PandaReachDense-v2/policy.optimizer.pth +2 -2
- a2c-PandaReachDense-v2/policy.pth +2 -2
- config.json +1 -1
- replay.mp4 +0 -0
- results.json +1 -1
- vec_normalize.pkl +1 -1
README.md
CHANGED
@@ -16,7 +16,7 @@ model-index:
       type: PandaReachDense-v2
     metrics:
     - type: mean_reward
-      value: -
+      value: -0.62 +/- 0.28
       name: mean_reward
       verified: false
 ---
a2c-PandaReachDense-v2.zip
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:a875dc527aadc150e5207fef221ccd126f9ee0ac3b03d40e2fe203d1bb1cfb99
+size 109536
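a2c-PandaReachDense-v2.zip is the packaged Stable-Baselines3 checkpoint tracked through Git LFS (the a2c-PandaReachDense-v2/ entries below are its unpacked contents). A minimal sketch of fetching and loading it with the huggingface_sb3 helper follows; the repo_id is a placeholder, since the repository name is not part of this diff.

```python
# Sketch: pull the checkpoint from the Hub and load it with SB3.
# The repo_id is a placeholder (not shown in this commit); replace it
# with the actual repository id before running.
from huggingface_sb3 import load_from_hub
from stable_baselines3 import A2C

checkpoint_path = load_from_hub(
    repo_id="<user>/a2c-PandaReachDense-v2",   # hypothetical repo id
    filename="a2c-PandaReachDense-v2.zip",
)
model = A2C.load(checkpoint_path)
```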
a2c-PandaReachDense-v2/data
CHANGED

Old version (removed lines are prefixed with "-"):

@@ -4,14 +4,16 @@
":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
- "__init__": "<function MultiInputActorCriticPolicy.__init__ at
"__abstractmethods__": "frozenset()",
- "_abc_impl": "<_abc._abc_data object at
},
"verbose": 1,
"policy_kwargs": {
":type:": "<class 'dict'>",
- ":serialized:": "
"optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>",
"optimizer_kwargs": {
"alpha": 0.99,

@@ -46,19 +48,19 @@
"_num_timesteps_at_start": 0,
"seed": null,
"action_noise": null,
- "start_time":
- "learning_rate": 0.
"tensorboard_log": null,
"lr_schedule": {
":type:": "<class 'function'>",
-
":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/
},
"_last_obs": {
":type:": "<class 'collections.OrderedDict'>",
- ":serialized:": "
- "achieved_goal": "[[0.
- "desired_goal": "[[-
- "observation": "[[
},
"_last_episode_starts": {
":type:": "<class 'numpy.ndarray'>",

@@ -66,29 +68,29 @@
},
"_last_original_obs": {
":type:": "<class 'collections.OrderedDict'>",
-
":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////
"achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]",
- "desired_goal": "[[
"observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"
},
"_episode_num": 0,
- "use_sde":
"sde_sample_freq": -1,
"_current_progress_remaining": 0.0,
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
-
":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",
":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
},
- "_n_updates":
- "n_steps":
"gamma": 0.99,
- "gae_lambda":
"ent_coef": 0.0,
- "vf_coef": 0.
"max_grad_norm": 0.5,
"normalize_advantage": false
}

New version (added lines are prefixed with "+"):

@@ -4,14 +4,16 @@
":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
+ "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7f44d53cf040>",
"__abstractmethods__": "frozenset()",
+ "_abc_impl": "<_abc._abc_data object at 0x7f44d53cb9c0>"
},
"verbose": 1,
"policy_kwargs": {
":type:": "<class 'dict'>",
+
":serialized:": "gAWVowAAAAAAAAB9lCiMDGxvZ19zdGRfaW5pdJRK/v///4wKb3J0aG9faW5pdJSJjA9vcHRpbWl6ZXJfY2xhc3OUjBN0b3JjaC5vcHRpbS5ybXNwcm9wlIwHUk1TcHJvcJSTlIwQb3B0aW1pemVyX2t3YXJnc5R9lCiMBWFscGhhlEc/764UeuFHrowDZXBzlEc+5Pi1iONo8YwMd2VpZ2h0X2RlY2F5lEsAdXUu",
+ "log_std_init": -2,
+ "ortho_init": false,
"optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>",
"optimizer_kwargs": {
"alpha": 0.99,

@@ -46,19 +48,19 @@
"_num_timesteps_at_start": 0,
"seed": null,
"action_noise": null,
+ "start_time": 1680145745945052324,
+ "learning_rate": 0.0009,
"tensorboard_log": null,
"lr_schedule": {
":type:": "<class 'function'>",
+
":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/TX2/SH/LkoWUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="
},
"_last_obs": {
":type:": "<class 'collections.OrderedDict'>",
+
":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAkr2aPipMdrxd/Qg/kr2aPipMdrxd/Qg/kr2aPipMdrxd/Qg/kr2aPipMdrxd/Qg/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAPpt0v3HFxT+9aSc9ZrINv8kQjr+WnHo/G8GCvwJIr79z2G6/mQm5vcWXvj4gbAC/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAACSvZo+Kkx2vF39CD+Mr0w9VOMOu7InRj2SvZo+Kkx2vF39CD+Mr0w9VOMOu7InRj2SvZo+Kkx2vF39CD+Mr0w9VOMOu7InRj2SvZo+Kkx2vF39CD+Mr0w9VOMOu7InRj2UaA5LBEsGhpRoEnSUUpR1Lg==",
+
"achieved_goal": "[[ 0.30222756 -0.01503281 0.535116 ]\n [ 0.30222756 -0.01503281 0.535116 ]\n [ 0.30222756 -0.01503281 0.535116 ]\n [ 0.30222756 -0.01503281 0.535116 ]]",
+
"desired_goal": "[[-0.9554938 1.5450879 0.04087232]\n [-0.5535034 -1.1098872 0.9789518 ]\n [-1.0215181 -1.369385 -0.93299025]\n [-0.09035034 0.37225166 -0.50164986]]",
+
"observation": "[[ 0.30222756 -0.01503281 0.535116 0.0499721 -0.0021803 0.0483777 ]\n [ 0.30222756 -0.01503281 0.535116 0.0499721 -0.0021803 0.0483777 ]\n [ 0.30222756 -0.01503281 0.535116 0.0499721 -0.0021803 0.0483777 ]\n [ 0.30222756 -0.01503281 0.535116 0.0499721 -0.0021803 0.0483777 ]]"
},
"_last_episode_starts": {
":type:": "<class 'numpy.ndarray'>",

@@ -66,29 +68,29 @@
},
"_last_original_obs": {
":type:": "<class 'collections.OrderedDict'>",
+
":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAsa4JvqFhuD2EVIk8uq0TvhhvcL0xAYQ9D9qQvU2pA776KTQ9oORjvZgYpT0hN1I+lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==",
"achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]",
+
"desired_goal": "[[-0.13445546 0.09002996 0.01676393]\n [-0.1442174 -0.0586997 0.0644554 ]\n [-0.07072841 -0.12857552 0.04398534]\n [-0.05563796 0.08061332 0.20528843]]",
"observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"
},
"_episode_num": 0,
+ "use_sde": true,
"sde_sample_freq": -1,
"_current_progress_remaining": 0.0,
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
+
":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIVcA9z5+257+UhpRSlIwBbJRLMowBdJRHQLou0iGFi8Z1fZQoaAZoCWgPQwiLpUi+Esjgv5SGlFKUaBVLMmgWR0C6LrSe2/i6dX2UKGgGaAloD0MIos9HGXEB2L+UhpRSlGgVSzJoFkdAui6Wi5/b03V9lChoBmgJaA9DCGqiz0cZsfC/lIaUUpRoFUsyaBZHQLoueD8+A3F1fZQoaAZoCWgPQwhS0Vj7O9vav5SGlFKUaBVLMmgWR0C6L1rupjtpdX2UKGgGaAloD0MI3GeVmdL62b+UhpRSlGgVSzJoFkdAui89bRneznV9lChoBmgJaA9DCAsIrYcvk+e/lIaUUpRoFUsyaBZHQLovHbrC3w11fZQoaAZoCWgPQwjG3/YEie3gv5SGlFKUaBVLMmgWR0C6Lv7pNbkfdX2UKGgGaAloD0MIbM1WXvI/4L+UhpRSlGgVSzJoFkdAui/pyDIzWXV9lChoBmgJaA9DCMRdvYqMDuS/lIaUUpRoFUsyaBZHQLovzI3BHkN1fZQoaAZoCWgPQwihvI+jOTLlv5SGlFKUaBVLMmgWR0C6L6zakAPvdX2UKGgGaAloD0MInKOOjquR2r+UhpRSlGgVSzJoFkdAui+OCXhOxnV9lChoBmgJaA9DCCz1LAjlfeC/lIaUUpRoFUsyaBZHQLoweINVinZ1fZQoaAZoCWgPQwhIbk26LdHzv5SGlFKUaBVLMmgWR0C6MFr+1jRVdX2UKGgGaAloD0MIurw5XKs96r+UhpRSlGgVSzJoFkdAujA7SMLncXV9lChoBmgJaA9DCKphvyfWqcy/lIaUUpRoFUsyaBZHQLowHG9pRGd1fZQoaAZoCWgPQwhAMh06Pe/Qv5SGlFKUaBVLMmgWR0C6MSXiJfpmdX2UKGgGaAloD0MIQUZAhSNI3L+UhpRSlGgVSzJoFkdAujEItf5ULnV9lChoBmgJaA9DCCL8i6Axk9+/lIaUUpRoFUsyaBZHQLow6T4tYjl1fZQoaAZoCWgPQwjU0XE1sivjv5SGlFKUaBVLMmgWR0C6MMrG3nZCdX2UKGgGaAloD0MIyXISSl8I1r+UhpRSlGgVSzJoFkdAujIVGnXNDHV9lChoBmgJaA9DCGMraFpiZdq/lIaUUpRoFUsyaBZHQLox+AtFrmB1fZQoaAZoCWgPQwi+g584gH7av5SGlFKUaBVLMmgWR0C6MdjJEH+qdX2UKGgGaAloD0MID39N1qgH7r+UhpRSlGgVSzJoFkdAujG6coYvWnV9lChoBmgJaA9DCNMtO8Q/bOu/lIaUUpRoFUsyaBZHQLoy8gKnei11fZQoaAZoCWgPQwitiQW+olvWv5SGlFKUaBVLMmgWR0C6MtTHwPRRdX2UKGgGaAloD0MIQUmBBTBl07+UhpRSlGgVSzJoFkdAujK1X/5tWXV9lChoBmgJaA9DCCaOPBBZpN+/lIaUUpRoFUsyaBZHQLoylt/nW8R1fZQoaAZoCWgPQwh6VWe1wF7yv5SGlFKUaBVLMmgWR0C6M+9dmg8KdX2UKGgGaAloD0MIcjJxqyAG37+UhpRSlGgVSzJoFkdAujPSO5rgwXV9lChoBmgJaA9DCB6lEp7Q696/lIaUUpRoFUsyaBZHQLozsr0aqCJ1fZQoaAZoCWgPQwhGtYgoJu/hv5SGlFKUaBVLMmgWR0C6M5RSLqD9dX2UKGgGaAloD0MIB7e1hecl5r+UhpRSlGgVSzJoFkdAujTh8qnWKHV9lChoBmgJaA9DCEKUL2ghgeu/lIaUUpRoFUsyaBZHQLo0xMOf/WF1fZQoaAZoCWgPQwgZ/z7jwgHkv5SGlFKUaBVLMmgWR0C6NKWGmDUWdX2UKGgGaAloD0MIjliLTwGw87+UhpRSlGgVSzJoFkdAujSHD/EOy3V9lChoBmgJaA9DCIy61t6nquq/lIaUUpRoFUsyaBZHQLo11mPHT7V1fZQoaAZoCWgPQwj75ZMVw1Xov5SGlFKUaBVLMmgWR0C6Nbl0knkUdX2UKGgGaAloD0MImgrxSLy84b+UhpRSlGgVSzJoFkdAujWaKjzqbHV9lChoBmgJaA9DCEHxY8xdi/q/lIaUUpRoFUsyaBZHQLo1e8vVVgh1fZQoaAZoCWgPQwi5pdWQuEfvv5SGlFKUaBVLMmgWR0C6Nn4593KTdX2UKGgGaAloD0MIZ0XURJ+P9L+UhpRSlGgVSzJoFkdAujZgmsvIwXV9lChoBmgJaA9DCGhBKO/j6OK/lIaUUpRoFUsyaBZHQLo2QMPz4Dd1fZQoaAZoCWgPQwgQroBCPX3av5SGlFKUaBVLMmgWR0C6NiHfMwDedX2UKGgGaAloD0MIXFX2XRH81L+UhpRSlGgVSzJoFkdAujcOVII4VHV9lChoBmgJaA9DCAexM4XO6+e/lIaUUpRoFUsyaBZHQLo28M5fdAR1fZQoaAZoCWgPQwhVih2NQ/3nv5SGlFKUaBVLMmgWR0C6NtEVWS2ZdX2UKGgGaAloD0MIkjzX9+Eg+r+UhpRSlGgVSzJoFkdAujayHxjJ+3V9lChoBmgJaA9DCGsPe6GAbf2/lIaUUpRoFUsyaBZHQLo3mkE9t/F1fZQoaAZoCWgPQwjQJodPOpHlv5SGlFKUaBVLMmgWR0C6N3zEehf0dX2UKGgGaAloD0MIIjgu46ZG8b+UhpRSlGgVSzJoFkdAujdc7p3X7XV9lChoBmgJaA9DCF1r71NV6OC/lIaUUpRoFUsyaBZHQLo3Pic5Ke11fZQoaAZoCWgPQwgl5llJK77nv5SGlFKUaBVLMmgWR0C6OCuj2zv7dX2UKGgGaAloD0MIERssnKT55r+UhpRSlGgVSzJoFkdAujgOIeo1k3V9lChoBmgJaA9DCOrKZ3ke3Pi/lIaUUpRoFUsyaBZHQLo37ljEvTR1fZQoaAZoCWgPQwilLhnHSPbYv5SGlFKUaBVLMmgWR0C6N8+ARTS9dX2UKGgGaAloD0MI4SnkSj0Lvr+UhpRSlGgVSzJoFkdAuji2TJQtSXV9lChoBmgJaA9DCIKsp1ZfXeu/lIaUUpRoFUsyaBZHQLo4mLkCFK11fZQoaAZoCWgPQwixpx3+mqzav5SGlFKUaBVLMmgWR0C6OHj3RG+cdX2UKGgGaAloD0MImBdgH5269L+UhpRSlGgVSzJoFkdAujhaECeVcHV9lChoBmgJaA9DCHPWpxyTxd2/lIaUUpRoFUsyaBZHQLo5RhfShJ11fZQoaAZoCWgPQwjNlNbfEgDhv5SGlFKUaBVLMmgWR0C6OSiNbTttdX2UKGgGaAloD0MIzPCfbqBA5r+UhpRSlGgVSzJoFkdAujkI1YQrc3
V9lChoBmgJaA9DCL9iDRe5p+S/lIaUUpRoFUsyaBZHQLo46eMhouh1fZQoaAZoCWgPQwiBIECGjh3mv5SGlFKUaBVLMmgWR0C6OdQM2FWXdX2UKGgGaAloD0MIf05BfjZy1L+UhpRSlGgVSzJoFkdAujm2gHu7YnV9lChoBmgJaA9DCI7O+SmOA9e/lIaUUpRoFUsyaBZHQLo5lqgh8pl1fZQoaAZoCWgPQwjlRLsKKT/tv5SGlFKUaBVLMmgWR0C6OXfSDyvtdX2UKGgGaAloD0MIzF1LyAc90L+UhpRSlGgVSzJoFkdAujphDc/MXHV9lChoBmgJaA9DCB/11yssOOK/lIaUUpRoFUsyaBZHQLo6Q5UcXFd1fZQoaAZoCWgPQwgfaXBbW/jgv5SGlFKUaBVLMmgWR0C6OiPYao/BdX2UKGgGaAloD0MIXhQ98DFY8r+UhpRSlGgVSzJoFkdAujoFL6DXe3V9lChoBmgJaA9DCAK4WbxYWPC/lIaUUpRoFUsyaBZHQLo66+FDfFd1fZQoaAZoCWgPQwh7TKQ0m0fnv5SGlFKUaBVLMmgWR0C6Os45ggHNdX2UKGgGaAloD0MIUFPL1vqi6b+UhpRSlGgVSzJoFkdAujqubtqpLnV9lChoBmgJaA9DCFeW6CyzyPi/lIaUUpRoFUsyaBZHQLo6j5HEuQJ1fZQoaAZoCWgPQwhQqRJlb2nwv5SGlFKUaBVLMmgWR0C6O3cXrMTwdX2UKGgGaAloD0MIrWu0HOjh8r+UhpRSlGgVSzJoFkdAujtZnDiwS3V9lChoBmgJaA9DCCFWf4RhwPO/lIaUUpRoFUsyaBZHQLo7Oj3mFJx1fZQoaAZoCWgPQwh+ObNdoQ/wv5SGlFKUaBVLMmgWR0C6Oxtuk1uSdX2UKGgGaAloD0MIkzZV98jm47+UhpRSlGgVSzJoFkdAujwUJdB0IXV9lChoBmgJaA9DCAVpxqLprPK/lIaUUpRoFUsyaBZHQLo79qCpWFN1fZQoaAZoCWgPQwgvF/GdmPXrv5SGlFKUaBVLMmgWR0C6O9dG3F1kdX2UKGgGaAloD0MI6kKs/ghD8r+UhpRSlGgVSzJoFkdAuju4cjqv/3V9lChoBmgJaA9DCPLvMy4cCOu/lIaUUpRoFUsyaBZHQLo8orBj4Hp1fZQoaAZoCWgPQwgSFhVxOkngv5SGlFKUaBVLMmgWR0C6PIYc/+sHdX2UKGgGaAloD0MI6wCIu3oV0b+UhpRSlGgVSzJoFkdAujxnwCr923V9lChoBmgJaA9DCOz3xDpVPuK/lIaUUpRoFUsyaBZHQLo8SQuEmIF1fZQoaAZoCWgPQwhAS1ewjXjmv5SGlFKUaBVLMmgWR0C6PTbxNIsidX2UKGgGaAloD0MIw5/hzRq80b+UhpRSlGgVSzJoFkdAuj0ZdLQHA3V9lChoBmgJaA9DCC+ob5nTZc+/lIaUUpRoFUsyaBZHQLo8+bu+h5B1fZQoaAZoCWgPQwiuZwjHLHvEv5SGlFKUaBVLMmgWR0C6PNrwjMV2dX2UKGgGaAloD0MI3Xu45LjT4b+UhpRSlGgVSzJoFkdAuj3Zepn6EnV9lChoBmgJaA9DCO5aQj7o2fK/lIaUUpRoFUsyaBZHQLo9u+m3vx91fZQoaAZoCWgPQwgKhJ1i1SDWv5SGlFKUaBVLMmgWR0C6PZwe/5+IdX2UKGgGaAloD0MIKULqdvaV6L+UhpRSlGgVSzJoFkdAuj19pYcNpnV9lChoBmgJaA9DCKA1P/7S4vC/lIaUUpRoFUsyaBZHQLo+ZhYeT3Z1fZQoaAZoCWgPQwhHIF7XL9jfv5SGlFKUaBVLMmgWR0C6PkiEDhcadX2UKGgGaAloD0MISGx3D9D94L+UhpRSlGgVSzJoFkdAuj4o0VJti3V9lChoBmgJaA9DCNRi8DDtm+i/lIaUUpRoFUsyaBZHQLo+Cf3N9ph1ZS4="
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",
":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
},
+ "_n_updates": 62500,
+ "n_steps": 8,
"gamma": 0.99,
+ "gae_lambda": 0.9,
"ent_coef": 0.0,
+ "vf_coef": 0.4,
"max_grad_norm": 0.5,
"normalize_advantage": false
}
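The updated data entries pin down the hyperparameters stored in this checkpoint: learning_rate 0.0009, n_steps 8, gamma 0.99, gae_lambda 0.9, ent_coef 0.0, vf_coef 0.4, max_grad_norm 0.5, gSDE enabled, and a policy built with log_std_init=-2 and ortho_init=False on SB3's default RMSprop optimizer (alpha 0.99, eps 1e-05). The 62,500 updates are consistent with 8 steps x 4 parallel envs x 62,500 = 2,000,000 timesteps reported in config.json. A rough sketch of an equivalent A2C setup follows; it is reconstructed from the diff, not the author's actual training script, and assumes the panda_gym package to register the environment.

```python
# Sketch of an A2C configuration matching the updated checkpoint data.
# Reconstructed from this diff; not the original training script.
import panda_gym  # noqa: F401 -- assumed dependency, registers PandaReachDense-v2
from stable_baselines3 import A2C
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.vec_env import VecNormalize

env = make_vec_env("PandaReachDense-v2", n_envs=4)  # "n_envs": 4 in config.json
env = VecNormalize(env)                             # stats later saved to vec_normalize.pkl

model = A2C(
    "MultiInputPolicy",        # MultiInputActorCriticPolicy in the serialized data
    env,
    learning_rate=0.0009,
    n_steps=8,
    gamma=0.99,
    gae_lambda=0.9,
    ent_coef=0.0,
    vf_coef=0.4,
    max_grad_norm=0.5,
    use_sde=True,
    normalize_advantage=False,
    policy_kwargs=dict(log_std_init=-2, ortho_init=False),
    verbose=1,
)
model.learn(total_timesteps=2_000_000)  # "num_timesteps": 2000000 in config.json
model.save("a2c-PandaReachDense-v2")
env.save("vec_normalize.pkl")
```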
a2c-PandaReachDense-v2/policy.optimizer.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:8141026262361bfeb1a83b963a4584711f1d5a5784f5595f17e4ccb569b846a2
+size 45438
a2c-PandaReachDense-v2/policy.pth
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:04d25e1cef9947f579a910894e17b99c426f8f77c3397c34668029e9a7412c8a
+size 46718
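policy.pth and policy.optimizer.pth are the PyTorch state dicts extracted from the checkpoint (policy weights and RMSprop optimizer state, respectively). Assuming the Git LFS objects have been fetched, they can typically be inspected directly; a small sketch:

```python
# Sketch: inspect the serialized policy weights and optimizer state.
# Assumes `git lfs pull` has replaced the LFS pointers with the real files.
import torch

policy_state = torch.load("a2c-PandaReachDense-v2/policy.pth", map_location="cpu")
optimizer_state = torch.load("a2c-PandaReachDense-v2/policy.optimizer.pth", map_location="cpu")

for name, tensor in policy_state.items():
    print(name, tuple(tensor.shape))  # layer-by-layer parameter shapes
print(optimizer_state.keys())         # typically 'state' and 'param_groups'
```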
config.json
CHANGED
@@ -1 +1 @@
-
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=", "__module__": "stable_baselines3.common.policies", "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7fb6ea997700>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7fb6ea995c00>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVgQAAAAAAAAB9lCiMD29wdGltaXplcl9jbGFzc5SME3RvcmNoLm9wdGltLnJtc3Byb3CUjAdSTVNwcm9wlJOUjBBvcHRpbWl6ZXJfa3dhcmdzlH2UKIwFYWxwaGGURz/vrhR64UeujANlcHOURz7k+LWI42jxjAx3ZWlnaHRfZGVjYXmUSwB1dS4=", "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "observation_space": {":type:": "<class 'gym.spaces.dict.Dict'>", ":serialized:": 
"gAWVUgMAAAAAAACMD2d5bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwOZ3ltLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUaBCTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowGX3NoYXBllEsDhZSMA2xvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZSMAUOUdJRSlIwEaGlnaJRoHSiWDAAAAAAAAAAAACBBAAAgQQAAIEGUaBVLA4WUaCB0lFKUjA1ib3VuZGVkX2JlbG93lGgdKJYDAAAAAAAAAAEBAZRoEowCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZRoIHSUUpSMDWJvdW5kZWRfYWJvdmWUaB0olgMAAAAAAAAAAQEBlGgsSwOFlGggdJRSlIwKX25wX3JhbmRvbZROdWKMDGRlc2lyZWRfZ29hbJRoDSmBlH2UKGgQaBVoGEsDhZRoGmgdKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZRoIHSUUpRoI2gdKJYMAAAAAAAAAAAAIEEAACBBAAAgQZRoFUsDhZRoIHSUUpRoKGgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoMmgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoN051YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgVaBhLBoWUaBpoHSiWGAAAAAAAAAAAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBVLBoWUaCB0lFKUaCNoHSiWGAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEGUaBVLBoWUaCB0lFKUaChoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDJoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDdOdWJ1aBhOaBBOaDdOdWIu", "spaces": "OrderedDict([('achieved_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('desired_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('observation', Box([-10. -10. -10. -10. -10. -10.], [10. 10. 10. 10. 10. 10.], (6,), float32))])", "_shape": null, "dtype": null, "_np_random": null}, "action_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": "gAWVbQEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLA4WUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWDAAAAAAAAAAAAIC/AACAvwAAgL+UaApLA4WUjAFDlHSUUpSMBGhpZ2iUaBIolgwAAAAAAAAAAACAPwAAgD8AAIA/lGgKSwOFlGgVdJRSlIwNYm91bmRlZF9iZWxvd5RoEiiWAwAAAAAAAAABAQGUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLA4WUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYDAAAAAAAAAAEBAZRoIUsDhZRoFXSUUpSMCl9ucF9yYW5kb22UTnViLg==", "dtype": "float32", "_shape": [3], "low": "[-1. -1. -1.]", "high": "[1. 1. 
1.]", "bounded_below": "[ True True True]", "bounded_above": "[ True True True]", "_np_random": null}, "n_envs": 4, "num_timesteps": 2000000, "_total_timesteps": 2000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1680074774317314458, "learning_rate": 0.0007, "tensorboard_log": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/RvAGjbi6x4WUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="}, "_last_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAtzvZPhHBbTupixU/tzvZPhHBbTupixU/tzvZPhHBbTupixU/tzvZPhHBbTupixU/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAkRicv5rjz7/zKLA/viUxv8HTBD53dxg/UN82PwlebT9C3bM/Cl4dPsM0+D14/NA/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAAC3O9k+EcFtO6mLFT+Q8lW6/B2JuVcD8ru3O9k+EcFtO6mLFT+Q8lW6/B2JuVcD8ru3O9k+EcFtO6mLFT+Q8lW6/B2JuVcD8ru3O9k+EcFtO6mLFT+Q8lW6/B2JuVcD8ruUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[0.4242837 0.00362784 0.5841623 ]\n [0.4242837 0.00362784 0.5841623 ]\n [0.4242837 0.00362784 0.5841623 ]\n [0.4242837 0.00362784 0.5841623 ]]", "desired_goal": "[[-1.2194997 -1.6241333 1.3762497 ]\n [-0.69198215 0.12971403 0.5955729 ]\n [ 0.714345 0.9272161 1.4051898 ]\n [ 0.15367904 0.12119438 1.6327047 ]]", "observation": "[[ 4.2428371e-01 3.6278407e-03 5.8416229e-01 -8.1614498e-04\n -2.6153016e-04 -7.3856520e-03]\n [ 4.2428371e-01 3.6278407e-03 5.8416229e-01 -8.1614498e-04\n -2.6153016e-04 -7.3856520e-03]\n [ 4.2428371e-01 3.6278407e-03 5.8416229e-01 -8.1614498e-04\n -2.6153016e-04 -7.3856520e-03]\n [ 4.2428371e-01 3.6278407e-03 5.8416229e-01 -8.1614498e-04\n -2.6153016e-04 -7.3856520e-03]]"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAEBAQGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": 
"gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAt/SyPI7/oz0Zj4A9SW25vemPeL2biXo92Sn+vWFxm71zyHM9iMOrPZKQAryewps9lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]", "desired_goal": "[[ 0.0218452 0.08007728 0.06277294]\n [-0.09054048 -0.06068412 0.06116639]\n [-0.12410326 -0.07589985 0.05951734]\n [ 0.08386904 -0.00796904 0.0760548 ]]", "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"}, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIWn7gKk8gB8CUhpRSlIwBbJRLMowBdJRHQLpno3iaRZF1fZQoaAZoCWgPQwhVih2NQ30LwJSGlFKUaBVLMmgWR0C6Z4TdcjZ+dX2UKGgGaAloD0MIZW6+Ed1zC8CUhpRSlGgVSzJoFkdAumdgDwH7g3V9lChoBmgJaA9DCH9pUZ/kvhfAlIaUUpRoFUsyaBZHQLpnMfKp1ih1fZQoaAZoCWgPQwgDtRg8THsLwJSGlFKUaBVLMmgWR0C6aCoAwPAgdX2UKGgGaAloD0MILliqC3jZBMCUhpRSlGgVSzJoFkdAumgLZVXFLnV9lChoBmgJaA9DCN+LL9rjtRvAlIaUUpRoFUsyaBZHQLpn5nanJkp1fZQoaAZoCWgPQwjW5ZSAmAQCwJSGlFKUaBVLMmgWR0C6Z7hK+SKWdX2UKGgGaAloD0MIMBAEyNDRFcCUhpRSlGgVSzJoFkdAumj73PAwf3V9lChoBmgJaA9DCGItPgXAWA7AlIaUUpRoFUsyaBZHQLpo3Z00WM11fZQoaAZoCWgPQwj4NCcvMnESwJSGlFKUaBVLMmgWR0C6aLj9jwx4dX2UKGgGaAloD0MIYK5FC9B2DcCUhpRSlGgVSzJoFkdAumiLFHavinV9lChoBmgJaA9DCDwx68VQjhPAlIaUUpRoFUsyaBZHQLpp1Ljghr51fZQoaAZoCWgPQwhLOsrBbAITwJSGlFKUaBVLMmgWR0C6abbmMfihdX2UKGgGaAloD0MIxsN7DiznBcCUhpRSlGgVSzJoFkdAummSwosqa3V9lChoBmgJaA9DCGRXWkbqrRTAlIaUUpRoFUsyaBZHQLppZPjXFtN1fZQoaAZoCWgPQwjlgF1NnnIVwJSGlFKUaBVLMmgWR0C6arORHPNWdX2UKGgGaAloD0MIiSZQxCKWFsCUhpRSlGgVSzJoFkdAumqVaFEiMnV9lChoBmgJaA9DCM1bdR2qCRLAlIaUUpRoFUsyaBZHQLpqcOKwY+B1fZQoaAZoCWgPQwjIXBlUG/wQwJSGlFKUaBVLMmgWR0C6akOM6zVudX2UKGgGaAloD0MImIi3zr/tEcCUhpRSlGgVSzJoFkdAumuYpPRAr3V9lChoBmgJaA9DCMP0vYbg2AbAlIaUUpRoFUsyaBZHQLprepEQXhx1fZQoaAZoCWgPQwgfuwuUFJgHwJSGlFKUaBVLMmgWR0C6a1YWUKRddX2UKGgGaAloD0MIlUVhF0U/FsCUhpRSlGgVSzJoFkdAumsoWUKRdXV9lChoBmgJaA9DCCRh304iohDAlIaUUpRoFUsyaBZHQLpsfVUuL751fZQoaAZoCWgPQwgg8MAAwgcOwJSGlFKUaBVLMmgWR0C6bF8a86FNdX2UKGgGaAloD0MIAyUFFsAUDsCUhpRSlGgVSzJoFkdAumw6nQ6ZIHV9lChoBmgJaA9DCEZDxqNUgg7AlIaUUpRoFUsyaBZHQLpsDNiH6/J1fZQoaAZoCWgPQwh8CoDxDNoNwJSGlFKUaBVLMmgWR0C6bYSTY/VzdX2UKGgGaAloD0MIvth78UXLF8CUhpRSlGgVSzJoFkdAum1mTr3TNXV9lChoBmgJaA9DCAhW1cvvdA/AlIaUUpRoFUsyaBZHQLptQxBVuJl1fZQoaAZoCWgPQwjyzqEMVQkgwJSGlFKUaBVLMmgWR0C6bRU6Lfk4dX2UKGgGaAloD0MI9FDbhlHwFMCUhpRSlGgVSzJoFkdAum5xWZJCjXV9lChoBmgJaA9DCFFoWfeP5RbAlIaUUpRoFUsyaBZHQLpuU1WKdhB1fZ
QoaAZoCWgPQwjT9UTXhR8QwJSGlFKUaBVLMmgWR0C6bi7P2PDHdX2UKGgGaAloD0MINpVFYRd1GMCUhpRSlGgVSzJoFkdAum4BM36yjnV9lChoBmgJaA9DCJvIzAUujxTAlIaUUpRoFUsyaBZHQLpvTzUZvUB1fZQoaAZoCWgPQwhKRs7CnrYQwJSGlFKUaBVLMmgWR0C6bzDewcHXdX2UKGgGaAloD0MIuMoTCDulAcCUhpRSlGgVSzJoFkdAum8OVbA1vXV9lChoBmgJaA9DCMYxkj1CbQjAlIaUUpRoFUsyaBZHQLpu4MW43FV1fZQoaAZoCWgPQwhfl+E/3WAPwJSGlFKUaBVLMmgWR0C6cDVYhdMTdX2UKGgGaAloD0MI6ITQQZeQF8CUhpRSlGgVSzJoFkdAunAXYChexHV9lChoBmgJaA9DCLn6sUl+RAPAlIaUUpRoFUsyaBZHQLpv8vUjLSx1fZQoaAZoCWgPQwi9qrNaYO8NwJSGlFKUaBVLMmgWR0C6b8U56t1ZdX2UKGgGaAloD0MI9bpFYKyvEMCUhpRSlGgVSzJoFkdAunEs0WM0g3V9lChoBmgJaA9DCHzUX6+wwBDAlIaUUpRoFUsyaBZHQLpxD4qwyIp1fZQoaAZoCWgPQwiUwrzHmeYQwJSGlFKUaBVLMmgWR0C6cOsCPp6hdX2UKGgGaAloD0MIJlXbTfC9EsCUhpRSlGgVSzJoFkdAunC9+mWMTHV9lChoBmgJaA9DCB8tzhjmJBvAlIaUUpRoFUsyaBZHQLpx/+XqqwR1fZQoaAZoCWgPQwhZ3eo56Z0MwJSGlFKUaBVLMmgWR0C6ceGw7kn1dX2UKGgGaAloD0MI+I2vPbMEBsCUhpRSlGgVSzJoFkdAunG9KkEcKnV9lChoBmgJaA9DCDQPYJFfjxDAlIaUUpRoFUsyaBZHQLpxj2c8Tzx1fZQoaAZoCWgPQwjpnJ/iONAWwJSGlFKUaBVLMmgWR0C6cplFtsN2dX2UKGgGaAloD0MIrwW9N4ZgC8CUhpRSlGgVSzJoFkdAunJ6r7waznV9lChoBmgJaA9DCNIZGHlZQxXAlIaUUpRoFUsyaBZHQLpyVcoH9m91fZQoaAZoCWgPQwjBxvXv+mwHwJSGlFKUaBVLMmgWR0C6ciehoM8YdX2UKGgGaAloD0MIrroO1ZRkEcCUhpRSlGgVSzJoFkdAunMY1m8M/nV9lChoBmgJaA9DCL76eOi7mxHAlIaUUpRoFUsyaBZHQLpy+jVQQ+V1fZQoaAZoCWgPQwhlic4yi3AYwJSGlFKUaBVLMmgWR0C6ctVJ+UhWdX2UKGgGaAloD0MIeCXJc33fD8CUhpRSlGgVSzJoFkdAunKnNqxkd3V9lChoBmgJaA9DCFYt6SgHkwfAlIaUUpRoFUsyaBZHQLpzk1dxAB11fZQoaAZoCWgPQwjGUiRfCeQNwJSGlFKUaBVLMmgWR0C6c3Td+G47dX2UKGgGaAloD0MIHt5zYDmSE8CUhpRSlGgVSzJoFkdAunNP/ffoBHV9lChoBmgJaA9DCAGkNnFyXwvAlIaUUpRoFUsyaBZHQLpzIdCE6DJ1fZQoaAZoCWgPQwhzEkpfCBkVwJSGlFKUaBVLMmgWR0C6dBjDbah6dX2UKGgGaAloD0MIstgmFY1VCcCUhpRSlGgVSzJoFkdAunP6QjlgdHV9lChoBmgJaA9DCAMjL2tiYRLAlIaUUpRoFUsyaBZHQLpz1VtXPqt1fZQoaAZoCWgPQwjIQnQIHKkTwJSGlFKUaBVLMmgWR0C6c6ddiUgTdX2UKGgGaAloD0MI5CzsaYdfFsCUhpRSlGgVSzJoFkdAunSVZSvTw3V9lChoBmgJaA9DCMSXiSKkfhTAlIaUUpRoFUsyaBZHQLp0dsq8UVV1fZQoaAZoCWgPQwh9PzVeutkUwJSGlFKUaBVLMmgWR0C6dFHggow3dX2UKGgGaAloD0MI/rW8cr3NDsCUhpRSlGgVSzJoFkdAunQjtkWhy3V9lChoBmgJaA9DCMnnFU89UhDAlIaUUpRoFUsyaBZHQLp1EesPrfN1fZQoaAZoCWgPQwhbXrneNkMUwJSGlFKUaBVLMmgWR0C6dPNgF5fMdX2UKGgGaAloD0MIYfw07s3/EsCUhpRSlGgVSzJoFkdAunTObBoEjnV9lChoBmgJaA9DCF03pbxWQgzAlIaUUpRoFUsyaBZHQLp0oEBbOeJ1fZQoaAZoCWgPQwjpDIy8rLkUwJSGlFKUaBVLMmgWR0C6dY6U3XI2dX2UKGgGaAloD0MIdk8eFmoNFcCUhpRSlGgVSzJoFkdAunVv9ehPCXV9lChoBmgJaA9DCJYhjnVxmw3AlIaUUpRoFUsyaBZHQLp1SwevIOp1fZQoaAZoCWgPQwjzO01mvO0UwJSGlFKUaBVLMmgWR0C6dRzrAxi5dX2UKGgGaAloD0MI+rX10382FsCUhpRSlGgVSzJoFkdAunYHBTGYKXV9lChoBmgJaA9DCHTPukbLoRPAlIaUUpRoFUsyaBZHQLp16HLA57x1fZQoaAZoCWgPQwjGT+Pe/LYQwJSGlFKUaBVLMmgWR0C6dcOIuXeFdX2UKGgGaAloD0MI+wYmN4pMCcCUhpRSlGgVSzJoFkdAunWVUR3/xXV9lChoBmgJaA9DCK3D0VW6WxbAlIaUUpRoFUsyaBZHQLp2hwYcebN1fZQoaAZoCWgPQwjuBPuvc8MXwJSGlFKUaBVLMmgWR0C6dmhrvb48dX2UKGgGaAloD0MIsOYAwRwNFMCUhpRSlGgVSzJoFkdAunZDfNzKcXV9lChoBmgJaA9DCKCp1y0C4w/AlIaUUpRoFUsyaBZHQLp2FUtqYZ51fZQoaAZoCWgPQwiCdRw/VMoTwJSGlFKUaBVLMmgWR0C6dwgRPGhmdX2UKGgGaAloD0MIq6+uCtTCF8CUhpRSlGgVSzJoFkdAunbpf6XSjXV9lChoBmgJaA9DCKPqVzofvgvAlIaUUpRoFUsyaBZHQLp2xMmnfl91fZQoaAZoCWgPQwi9qUiFsVUYwJSGlFKUaBVLMmgWR0C6dpaagElmdX2UKGgGaAloD0MIPMCTFi7rGMCUhpRSlGgVSzJoFkdAuneIekpI+XV9lChoBmgJaA9DCFXBqKROYBHAlIaUUpRoFUsyaBZHQLp3afKISDh1fZQoaAZoCWgPQwjbp+MxAxUXwJSGlFKUaBVLMmgWR0C6d0UUsWfsdX2UKGgGaAloD0MI/Ul87gTbFcCUhpRSlGgVSzJoFkdAuncW9cry2HV9lChoBmgJaA9DCMZrXtVZHRjAlIaUUpRoFUsyaBZHQLp4C6qKgqV1fZQoaAZoCWgPQwi70FynkZYUwJSGlFKUaBVLMmgWR0C6d+4ht+CsdX2UKGgGaAloD0MItOTxtPzQFsCUhpRSlGgVSzJoFkdAunfKREF4cHV9lChoBmgJaA9DCHDpmPOMLRHAlIaUUpRoFUsyaBZHQLp3nIEKVpt1ZS4="}, "ep_success_buffer": 
{":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 100000, "n_steps": 5, "gamma": 0.99, "gae_lambda": 1.0, "ent_coef": 0.0, "vf_coef": 0.5, "max_grad_norm": 0.5, "normalize_advantage": false, "system_info": {"OS": "Linux-5.10.147+-x86_64-with-glibc2.31 # 1 SMP Sat Dec 10 16:00:40 UTC 2022", "Python": "3.9.16", "Stable-Baselines3": "1.7.0", "PyTorch": "1.13.1+cu116", "GPU Enabled": "True", "Numpy": "1.22.4", "Gym": "0.21.0"}}
+
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=", "__module__": "stable_baselines3.common.policies", "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7f44d53cf040>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7f44d53cb9c0>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVowAAAAAAAAB9lCiMDGxvZ19zdGRfaW5pdJRK/v///4wKb3J0aG9faW5pdJSJjA9vcHRpbWl6ZXJfY2xhc3OUjBN0b3JjaC5vcHRpbS5ybXNwcm9wlIwHUk1TcHJvcJSTlIwQb3B0aW1pemVyX2t3YXJnc5R9lCiMBWFscGhhlEc/764UeuFHrowDZXBzlEc+5Pi1iONo8YwMd2VpZ2h0X2RlY2F5lEsAdXUu", "log_std_init": -2, "ortho_init": false, "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "observation_space": {":type:": "<class 'gym.spaces.dict.Dict'>", ":serialized:": 
"gAWVUgMAAAAAAACMD2d5bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwOZ3ltLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUaBCTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowGX3NoYXBllEsDhZSMA2xvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZSMAUOUdJRSlIwEaGlnaJRoHSiWDAAAAAAAAAAAACBBAAAgQQAAIEGUaBVLA4WUaCB0lFKUjA1ib3VuZGVkX2JlbG93lGgdKJYDAAAAAAAAAAEBAZRoEowCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZRoIHSUUpSMDWJvdW5kZWRfYWJvdmWUaB0olgMAAAAAAAAAAQEBlGgsSwOFlGggdJRSlIwKX25wX3JhbmRvbZROdWKMDGRlc2lyZWRfZ29hbJRoDSmBlH2UKGgQaBVoGEsDhZRoGmgdKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZRoIHSUUpRoI2gdKJYMAAAAAAAAAAAAIEEAACBBAAAgQZRoFUsDhZRoIHSUUpRoKGgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoMmgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoN051YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgVaBhLBoWUaBpoHSiWGAAAAAAAAAAAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBVLBoWUaCB0lFKUaCNoHSiWGAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEGUaBVLBoWUaCB0lFKUaChoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDJoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDdOdWJ1aBhOaBBOaDdOdWIu", "spaces": "OrderedDict([('achieved_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('desired_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('observation', Box([-10. -10. -10. -10. -10. -10.], [10. 10. 10. 10. 10. 10.], (6,), float32))])", "_shape": null, "dtype": null, "_np_random": null}, "action_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": "gAWVbQEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLA4WUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWDAAAAAAAAAAAAIC/AACAvwAAgL+UaApLA4WUjAFDlHSUUpSMBGhpZ2iUaBIolgwAAAAAAAAAAACAPwAAgD8AAIA/lGgKSwOFlGgVdJRSlIwNYm91bmRlZF9iZWxvd5RoEiiWAwAAAAAAAAABAQGUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLA4WUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYDAAAAAAAAAAEBAZRoIUsDhZRoFXSUUpSMCl9ucF9yYW5kb22UTnViLg==", "dtype": "float32", "_shape": [3], "low": "[-1. -1. -1.]", "high": "[1. 1. 
1.]", "bounded_below": "[ True True True]", "bounded_above": "[ True True True]", "_np_random": null}, "n_envs": 4, "num_timesteps": 2000000, "_total_timesteps": 2000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1680145745945052324, "learning_rate": 0.0009, "tensorboard_log": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/TX2/SH/LkoWUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="}, "_last_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAkr2aPipMdrxd/Qg/kr2aPipMdrxd/Qg/kr2aPipMdrxd/Qg/kr2aPipMdrxd/Qg/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAPpt0v3HFxT+9aSc9ZrINv8kQjr+WnHo/G8GCvwJIr79z2G6/mQm5vcWXvj4gbAC/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAACSvZo+Kkx2vF39CD+Mr0w9VOMOu7InRj2SvZo+Kkx2vF39CD+Mr0w9VOMOu7InRj2SvZo+Kkx2vF39CD+Mr0w9VOMOu7InRj2SvZo+Kkx2vF39CD+Mr0w9VOMOu7InRj2UaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[ 0.30222756 -0.01503281 0.535116 ]\n [ 0.30222756 -0.01503281 0.535116 ]\n [ 0.30222756 -0.01503281 0.535116 ]\n [ 0.30222756 -0.01503281 0.535116 ]]", "desired_goal": "[[-0.9554938 1.5450879 0.04087232]\n [-0.5535034 -1.1098872 0.9789518 ]\n [-1.0215181 -1.369385 -0.93299025]\n [-0.09035034 0.37225166 -0.50164986]]", "observation": "[[ 0.30222756 -0.01503281 0.535116 0.0499721 -0.0021803 0.0483777 ]\n [ 0.30222756 -0.01503281 0.535116 0.0499721 -0.0021803 0.0483777 ]\n [ 0.30222756 -0.01503281 0.535116 0.0499721 -0.0021803 0.0483777 ]\n [ 0.30222756 -0.01503281 0.535116 0.0499721 -0.0021803 0.0483777 ]]"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAEBAQGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": 
"gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAsa4JvqFhuD2EVIk8uq0TvhhvcL0xAYQ9D9qQvU2pA776KTQ9oORjvZgYpT0hN1I+lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]", "desired_goal": "[[-0.13445546 0.09002996 0.01676393]\n [-0.1442174 -0.0586997 0.0644554 ]\n [-0.07072841 -0.12857552 0.04398534]\n [-0.05563796 0.08061332 0.20528843]]", "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"}, "_episode_num": 0, "use_sde": true, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIVcA9z5+257+UhpRSlIwBbJRLMowBdJRHQLou0iGFi8Z1fZQoaAZoCWgPQwiLpUi+Esjgv5SGlFKUaBVLMmgWR0C6LrSe2/i6dX2UKGgGaAloD0MIos9HGXEB2L+UhpRSlGgVSzJoFkdAui6Wi5/b03V9lChoBmgJaA9DCGqiz0cZsfC/lIaUUpRoFUsyaBZHQLoueD8+A3F1fZQoaAZoCWgPQwhS0Vj7O9vav5SGlFKUaBVLMmgWR0C6L1rupjtpdX2UKGgGaAloD0MI3GeVmdL62b+UhpRSlGgVSzJoFkdAui89bRneznV9lChoBmgJaA9DCAsIrYcvk+e/lIaUUpRoFUsyaBZHQLovHbrC3w11fZQoaAZoCWgPQwjG3/YEie3gv5SGlFKUaBVLMmgWR0C6Lv7pNbkfdX2UKGgGaAloD0MIbM1WXvI/4L+UhpRSlGgVSzJoFkdAui/pyDIzWXV9lChoBmgJaA9DCMRdvYqMDuS/lIaUUpRoFUsyaBZHQLovzI3BHkN1fZQoaAZoCWgPQwihvI+jOTLlv5SGlFKUaBVLMmgWR0C6L6zakAPvdX2UKGgGaAloD0MInKOOjquR2r+UhpRSlGgVSzJoFkdAui+OCXhOxnV9lChoBmgJaA9DCCz1LAjlfeC/lIaUUpRoFUsyaBZHQLoweINVinZ1fZQoaAZoCWgPQwhIbk26LdHzv5SGlFKUaBVLMmgWR0C6MFr+1jRVdX2UKGgGaAloD0MIurw5XKs96r+UhpRSlGgVSzJoFkdAujA7SMLncXV9lChoBmgJaA9DCKphvyfWqcy/lIaUUpRoFUsyaBZHQLowHG9pRGd1fZQoaAZoCWgPQwhAMh06Pe/Qv5SGlFKUaBVLMmgWR0C6MSXiJfpmdX2UKGgGaAloD0MIQUZAhSNI3L+UhpRSlGgVSzJoFkdAujEItf5ULnV9lChoBmgJaA9DCCL8i6Axk9+/lIaUUpRoFUsyaBZHQLow6T4tYjl1fZQoaAZoCWgPQwjU0XE1sivjv5SGlFKUaBVLMmgWR0C6MMrG3nZCdX2UKGgGaAloD0MIyXISSl8I1r+UhpRSlGgVSzJoFkdAujIVGnXNDHV9lChoBmgJaA9DCGMraFpiZdq/lIaUUpRoFUsyaBZHQLox+AtFrmB1fZQoaAZoCWgPQwi+g584gH7av5SGlFKUaBVLMmgWR0C6MdjJEH+qdX2UKGgGaAloD0MID39N1qgH7r+UhpRSlGgVSzJoFkdAujG6coYvWnV9lChoBmgJaA9DCNMtO8Q/bOu/lIaUUpRoFUsyaBZHQLoy8gKnei11fZQoaAZoCWgPQwitiQW+olvWv5SGlFKUaBVLMmgWR0C6MtTHwPRRdX2UKGgGaAloD0MIQUmBBTBl07+UhpRSlGgVSzJoFkdAujK1X/5tWXV9lChoBmgJaA9DCCaOPBBZpN+/lIaUUpRoFUsyaBZHQLoylt/nW8R1fZQoaAZoCWgPQwh6VWe1wF7yv5SGlFKUaBVLMmgWR0C6M+9dmg8KdX2UKGgGaAloD0MIcjJxqyAG37+UhpRSlGgVSzJoFkdAujPSO5rgwXV9lChoBmgJaA9DCB6lEp7Q696/lIaUUpRoFUsyaBZHQLozsr0aqCJ1fZQoaAZoCWgPQwhGtYgoJu/hv5SGlFKUaBVLMmgWR0C6M5RSLqD9dX2UKGgGaAloD0MIB7e1hecl5r+UhpRSlGgVSzJoFkdAujTh8qnWKHV9lChoBmgJaA9DCEKUL2ghgeu/lIaUUpRoFUsyaBZHQLo0xMOf/WF1fZQoa
AZoCWgPQwgZ/z7jwgHkv5SGlFKUaBVLMmgWR0C6NKWGmDUWdX2UKGgGaAloD0MIjliLTwGw87+UhpRSlGgVSzJoFkdAujSHD/EOy3V9lChoBmgJaA9DCIy61t6nquq/lIaUUpRoFUsyaBZHQLo11mPHT7V1fZQoaAZoCWgPQwj75ZMVw1Xov5SGlFKUaBVLMmgWR0C6Nbl0knkUdX2UKGgGaAloD0MImgrxSLy84b+UhpRSlGgVSzJoFkdAujWaKjzqbHV9lChoBmgJaA9DCEHxY8xdi/q/lIaUUpRoFUsyaBZHQLo1e8vVVgh1fZQoaAZoCWgPQwi5pdWQuEfvv5SGlFKUaBVLMmgWR0C6Nn4593KTdX2UKGgGaAloD0MIZ0XURJ+P9L+UhpRSlGgVSzJoFkdAujZgmsvIwXV9lChoBmgJaA9DCGhBKO/j6OK/lIaUUpRoFUsyaBZHQLo2QMPz4Dd1fZQoaAZoCWgPQwgQroBCPX3av5SGlFKUaBVLMmgWR0C6NiHfMwDedX2UKGgGaAloD0MIXFX2XRH81L+UhpRSlGgVSzJoFkdAujcOVII4VHV9lChoBmgJaA9DCAexM4XO6+e/lIaUUpRoFUsyaBZHQLo28M5fdAR1fZQoaAZoCWgPQwhVih2NQ/3nv5SGlFKUaBVLMmgWR0C6NtEVWS2ZdX2UKGgGaAloD0MIkjzX9+Eg+r+UhpRSlGgVSzJoFkdAujayHxjJ+3V9lChoBmgJaA9DCGsPe6GAbf2/lIaUUpRoFUsyaBZHQLo3mkE9t/F1fZQoaAZoCWgPQwjQJodPOpHlv5SGlFKUaBVLMmgWR0C6N3zEehf0dX2UKGgGaAloD0MIIjgu46ZG8b+UhpRSlGgVSzJoFkdAujdc7p3X7XV9lChoBmgJaA9DCF1r71NV6OC/lIaUUpRoFUsyaBZHQLo3Pic5Ke11fZQoaAZoCWgPQwgl5llJK77nv5SGlFKUaBVLMmgWR0C6OCuj2zv7dX2UKGgGaAloD0MIERssnKT55r+UhpRSlGgVSzJoFkdAujgOIeo1k3V9lChoBmgJaA9DCOrKZ3ke3Pi/lIaUUpRoFUsyaBZHQLo37ljEvTR1fZQoaAZoCWgPQwilLhnHSPbYv5SGlFKUaBVLMmgWR0C6N8+ARTS9dX2UKGgGaAloD0MI4SnkSj0Lvr+UhpRSlGgVSzJoFkdAuji2TJQtSXV9lChoBmgJaA9DCIKsp1ZfXeu/lIaUUpRoFUsyaBZHQLo4mLkCFK11fZQoaAZoCWgPQwixpx3+mqzav5SGlFKUaBVLMmgWR0C6OHj3RG+cdX2UKGgGaAloD0MImBdgH5269L+UhpRSlGgVSzJoFkdAujhaECeVcHV9lChoBmgJaA9DCHPWpxyTxd2/lIaUUpRoFUsyaBZHQLo5RhfShJ11fZQoaAZoCWgPQwjNlNbfEgDhv5SGlFKUaBVLMmgWR0C6OSiNbTttdX2UKGgGaAloD0MIzPCfbqBA5r+UhpRSlGgVSzJoFkdAujkI1YQrc3V9lChoBmgJaA9DCL9iDRe5p+S/lIaUUpRoFUsyaBZHQLo46eMhouh1fZQoaAZoCWgPQwiBIECGjh3mv5SGlFKUaBVLMmgWR0C6OdQM2FWXdX2UKGgGaAloD0MIf05BfjZy1L+UhpRSlGgVSzJoFkdAujm2gHu7YnV9lChoBmgJaA9DCI7O+SmOA9e/lIaUUpRoFUsyaBZHQLo5lqgh8pl1fZQoaAZoCWgPQwjlRLsKKT/tv5SGlFKUaBVLMmgWR0C6OXfSDyvtdX2UKGgGaAloD0MIzF1LyAc90L+UhpRSlGgVSzJoFkdAujphDc/MXHV9lChoBmgJaA9DCB/11yssOOK/lIaUUpRoFUsyaBZHQLo6Q5UcXFd1fZQoaAZoCWgPQwgfaXBbW/jgv5SGlFKUaBVLMmgWR0C6OiPYao/BdX2UKGgGaAloD0MIXhQ98DFY8r+UhpRSlGgVSzJoFkdAujoFL6DXe3V9lChoBmgJaA9DCAK4WbxYWPC/lIaUUpRoFUsyaBZHQLo66+FDfFd1fZQoaAZoCWgPQwh7TKQ0m0fnv5SGlFKUaBVLMmgWR0C6Os45ggHNdX2UKGgGaAloD0MIUFPL1vqi6b+UhpRSlGgVSzJoFkdAujqubtqpLnV9lChoBmgJaA9DCFeW6CyzyPi/lIaUUpRoFUsyaBZHQLo6j5HEuQJ1fZQoaAZoCWgPQwhQqRJlb2nwv5SGlFKUaBVLMmgWR0C6O3cXrMTwdX2UKGgGaAloD0MIrWu0HOjh8r+UhpRSlGgVSzJoFkdAujtZnDiwS3V9lChoBmgJaA9DCCFWf4RhwPO/lIaUUpRoFUsyaBZHQLo7Oj3mFJx1fZQoaAZoCWgPQwh+ObNdoQ/wv5SGlFKUaBVLMmgWR0C6Oxtuk1uSdX2UKGgGaAloD0MIkzZV98jm47+UhpRSlGgVSzJoFkdAujwUJdB0IXV9lChoBmgJaA9DCAVpxqLprPK/lIaUUpRoFUsyaBZHQLo79qCpWFN1fZQoaAZoCWgPQwgvF/GdmPXrv5SGlFKUaBVLMmgWR0C6O9dG3F1kdX2UKGgGaAloD0MI6kKs/ghD8r+UhpRSlGgVSzJoFkdAuju4cjqv/3V9lChoBmgJaA9DCPLvMy4cCOu/lIaUUpRoFUsyaBZHQLo8orBj4Hp1fZQoaAZoCWgPQwgSFhVxOkngv5SGlFKUaBVLMmgWR0C6PIYc/+sHdX2UKGgGaAloD0MI6wCIu3oV0b+UhpRSlGgVSzJoFkdAujxnwCr923V9lChoBmgJaA9DCOz3xDpVPuK/lIaUUpRoFUsyaBZHQLo8SQuEmIF1fZQoaAZoCWgPQwhAS1ewjXjmv5SGlFKUaBVLMmgWR0C6PTbxNIsidX2UKGgGaAloD0MIw5/hzRq80b+UhpRSlGgVSzJoFkdAuj0ZdLQHA3V9lChoBmgJaA9DCC+ob5nTZc+/lIaUUpRoFUsyaBZHQLo8+bu+h5B1fZQoaAZoCWgPQwiuZwjHLHvEv5SGlFKUaBVLMmgWR0C6PNrwjMV2dX2UKGgGaAloD0MI3Xu45LjT4b+UhpRSlGgVSzJoFkdAuj3Zepn6EnV9lChoBmgJaA9DCO5aQj7o2fK/lIaUUpRoFUsyaBZHQLo9u+m3vx91fZQoaAZoCWgPQwgKhJ1i1SDWv5SGlFKUaBVLMmgWR0C6PZwe/5+IdX2UKGgGaAloD0MIKULqdvaV6L+UhpRSlGgVSzJoFkdAuj19pYcNpnV9lChoBmgJaA9DCKA1P/7S4vC/lIaUUpRoFUsyaBZHQLo+ZhYeT3Z1fZQoaAZoCWgPQwhHIF7XL9jfv5SGlFKUaBVLMmgWR0C6PkiEDhcadX2UKGgGaAloD0MISGx3D9D94L+UhpRSlGgVSzJoFkdAuj4o0VJti3V9lChoBmgJaA9DCNRi8DDtm+i/lIaUUpRoFUsyaBZHQLo+Cf3N9ph1ZS4="}, "ep_success_buffer": {":type:": 
"<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 62500, "n_steps": 8, "gamma": 0.99, "gae_lambda": 0.9, "ent_coef": 0.0, "vf_coef": 0.4, "max_grad_norm": 0.5, "normalize_advantage": false, "system_info": {"OS": "Linux-5.10.147+-x86_64-with-glibc2.31 # 1 SMP Sat Dec 10 16:00:40 UTC 2022", "Python": "3.9.16", "Stable-Baselines3": "1.7.0", "PyTorch": "1.13.1+cu116", "GPU Enabled": "True", "Numpy": "1.22.4", "Gym": "0.21.0"}}
replay.mp4
CHANGED
Binary files a/replay.mp4 and b/replay.mp4 differ
results.json
CHANGED
@@ -1 +1 @@
-{"mean_reward": -
+{"mean_reward": -0.6240726829855703, "std_reward": 0.27991781566466306, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2023-03-30T05:01:41.494328"}
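results.json now reports a mean reward of roughly -0.62 +/- 0.28 over 10 deterministic evaluation episodes, matching the value written into the README metadata above. A sketch of how such numbers are typically produced with SB3's evaluation helper, assuming `model` and `eval_env` are set up as in the surrounding sketches:

```python
# Sketch: evaluate the policy the way results.json reports it
# (10 episodes, deterministic actions). `model` and `eval_env` are
# assumed to come from the loading sketches elsewhere on this page.
from stable_baselines3.common.evaluation import evaluate_policy

mean_reward, std_reward = evaluate_policy(
    model, eval_env, n_eval_episodes=10, deterministic=True
)
print(f"mean_reward={mean_reward:.2f} +/- {std_reward:.2f}")
```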
vec_normalize.pkl
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:15d78a527e14b02b95f2b7ed8bec657951c0cd1d0b8f7dbbc8b8c904dd1b2625
 size 3056
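vec_normalize.pkl stores the observation/return normalization statistics of the VecNormalize wrapper used during training; they have to be restored around any evaluation environment, otherwise the policy sees unnormalized inputs. A minimal sketch, using the file names from this commit:

```python
# Sketch: rebuild an evaluation environment with the saved normalization stats.
import panda_gym  # noqa: F401 -- assumed dependency, registers PandaReachDense-v2
from stable_baselines3 import A2C
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.vec_env import VecNormalize

eval_env = make_vec_env("PandaReachDense-v2", n_envs=1)
eval_env = VecNormalize.load("vec_normalize.pkl", eval_env)
eval_env.training = False      # freeze running statistics during evaluation
eval_env.norm_reward = False   # report raw (unnormalized) rewards

model = A2C.load("a2c-PandaReachDense-v2.zip", env=eval_env)
```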