Initial commit
- README.md +1 -1
- a2c-PandaReachDense-v2.zip +2 -2
- a2c-PandaReachDense-v2/data +13 -13
- a2c-PandaReachDense-v2/policy.optimizer.pth +2 -2
- a2c-PandaReachDense-v2/policy.pth +2 -2
- a2c-PandaReachDense-v2/system_info.txt +1 -1
- config.json +1 -1
- replay.mp4 +0 -0
- results.json +1 -1
- vec_normalize.pkl +1 -1
README.md CHANGED
@@ -16,7 +16,7 @@ model-index:
type: PandaReachDense-v2
metrics:
- type: mean_reward
- value: -
+ value: -2.00 +/- 0.56
name: mean_reward
verified: false
---
a2c-PandaReachDense-v2.zip CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
- oid sha256:
- size
+ oid sha256:1d8194cd5f9662bbd22f73a4df35dfe3bd72a631bc13f6a956079e1ab505be0b
+ size 108029
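The archive above is a Git LFS pointer file: the commit only updates the pointer's sha256 and size, while the binary itself sits in LFS storage. As a minimal sketch of pulling and loading the checkpoint (the repo id below is a placeholder, not taken from this commit; `huggingface_hub` and Stable-Baselines3 1.7.0 are assumed to be installed):

```python
# Hedged sketch: the repo id is hypothetical; only the filename comes from this commit.
from huggingface_hub import hf_hub_download
from stable_baselines3 import A2C

checkpoint_path = hf_hub_download(
    repo_id="<user>/a2c-PandaReachDense-v2",  # placeholder repo id
    filename="a2c-PandaReachDense-v2.zip",    # the LFS-tracked archive in this diff
)
# The zip bundles the data file and the .pth weights listed elsewhere in this commit.
model = A2C.load(checkpoint_path)
print(model.policy)
```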
a2c-PandaReachDense-v2/data CHANGED
@@ -4,9 +4,9 @@
":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
- "__init__": "<function MultiInputActorCriticPolicy.__init__ at 
+ "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7f41bed125e0>",
"__abstractmethods__": "frozenset()",
- "_abc_impl": "<_abc._abc_data object at 
+ "_abc_impl": "<_abc._abc_data object at 0x7f41bed0dfc0>"
},
"verbose": 1,
"policy_kwargs": {
@@ -41,12 +41,12 @@
"_np_random": null
},
"n_envs": 4,
- "num_timesteps": 
+ "num_timesteps": 2000000,
- "_total_timesteps": 
+ "_total_timesteps": 2000000,
"_num_timesteps_at_start": 0,
"seed": null,
"action_noise": null,
- "start_time": 
+ "start_time": 1678888376124291206,
"learning_rate": 0.0007,
"tensorboard_log": null,
"lr_schedule": {
@@ -55,10 +55,10 @@
},
"_last_obs": {
":type:": "<class 'collections.OrderedDict'>",
- ":serialized:": "
+
":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA5L/ePq04oLx+iQw/5L/ePq04oLx+iQw/5L/ePq04oLx+iQw/5L/ePq04oLx+iQw/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAm3XkPnsSDL/gx72/0rEJPTMZib8Jqwu+7gSiv+K4KT815QS+MkW7v8OVmD+eooK+lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADkv94+rTigvH6JDD9hvjQ83NOku0uyy7rkv94+rTigvH6JDD9hvjQ83NOku0uyy7rkv94+rTigvH6JDD9hvjQ83NOku0uyy7rkv94+rTigvH6JDD9hvjQ83NOku0uyy7qUaA5LBEsGhpRoEnSUUpR1Lg==",
- "achieved_goal": "[[ 0.
+
"achieved_goal": "[[ 0.43505776 -0.01955828 0.54897296]\n [ 0.43505776 -0.01955828 0.54897296]\n [ 0.43505776 -0.01955828 0.54897296]\n [ 0.43505776 -0.01955828 0.54897296]]",
- "desired_goal": "[[ 0.
+
"desired_goal": "[[ 0.44620976 -0.547157 -1.4826622 ]\n [ 0.03361685 -1.0710815 -0.13639463]\n [-1.2657754 0.66297734 -0.1297806 ]\n [-1.4630492 1.1920704 -0.25514692]]",
- "observation": "[[ 0.
+
"observation": "[[ 0.43505776 -0.01955828 0.54897296 0.01103172 -0.00503014 -0.00155408]\n [ 0.43505776 -0.01955828 0.54897296 0.01103172 -0.00503014 -0.00155408]\n [ 0.43505776 -0.01955828 0.54897296 0.01103172 -0.00503014 -0.00155408]\n [ 0.43505776 -0.01955828 0.54897296 0.01103172 -0.00503014 -0.00155408]]"
},
"_last_episode_starts": {
":type:": "<class 'numpy.ndarray'>",
@@ -66,9 +66,9 @@
},
"_last_original_obs": {
":type:": "<class 'collections.OrderedDict'>",
- ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////
+
":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAANXsbPdN3DT4kO0Y+7ly6PYbr8j3l2gI9EDK8PLlQ+L17IYc9eoyaPV6YR739MRE+lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==",
"achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]",
- "desired_goal": "[[ 0.
+
"desired_goal": "[[ 0.0379593 0.1381524 0.19358498]\n [ 0.09099756 0.11861329 0.03194704]\n [ 0.02297309 -0.12124772 0.06598183]\n [ 0.07546325 -0.04872929 0.14179225]]",
"observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"
},
"_episode_num": 0,
@@ -77,13 +77,13 @@
"_current_progress_remaining": 0.0,
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
- ":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////
+
":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMI63JKQEyC+r+UhpRSlIwBbJRLMowBdJRHQLbHDXHzYmN1fZQoaAZoCWgPQwgJi4o4nWT4v5SGlFKUaBVLMmgWR0C2xu+k+HJtdX2UKGgGaAloD0MIflLt0/F4CMCUhpRSlGgVSzJoFkdAtsbQe3hGY3V9lChoBmgJaA9DCN1FmKJc2gfAlIaUUpRoFUsyaBZHQLbGskuHvc91fZQoaAZoCWgPQwh0X85sV+j2v5SGlFKUaBVLMmgWR0C2x9SR8twrdX2UKGgGaAloD0MICaUvhJz3AMCUhpRSlGgVSzJoFkdAtse2rmyPdXV9lChoBmgJaA9DCMKjjSPWggXAlIaUUpRoFUsyaBZHQLbHl7EYO2B1fZQoaAZoCWgPQwioABjPoOH1v5SGlFKUaBVLMmgWR0C2x3mLUCq7dX2UKGgGaAloD0MIFlCop4+A9L+UhpRSlGgVSzJoFkdAtsiUD/2kBXV9lChoBmgJaA9DCD4HliNkoP+/lIaUUpRoFUsyaBZHQLbIdiXY1511fZQoaAZoCWgPQwgOZhNgWH7zv5SGlFKUaBVLMmgWR0C2yFcPz4DcdX2UKGgGaAloD0MISs6JPbTP9L+UhpRSlGgVSzJoFkdAtsg4xREWqXV9lChoBmgJaA9DCMEcPX5v0/i/lIaUUpRoFUsyaBZHQLbJZUEPlMh1fZQoaAZoCWgPQwiMSX8vhQf0v5SGlFKUaBVLMmgWR0C2yUdpRGc4dX2UKGgGaAloD0MIK76h8Nl697+UhpRSlGgVSzJoFkdAtskocdYGMXV9lChoBmgJaA9DCKDFUiRfifW/lIaUUpRoFUsyaBZHQLbJCjwQUYd1fZQoaAZoCWgPQwi3lslwPN/zv5SGlFKUaBVLMmgWR0C2yjw7tAs1dX2UKGgGaAloD0MI8S2sG+9O9L+UhpRSlGgVSzJoFkdAtsoeenQ6ZHV9lChoBmgJaA9DCC7IluXrsuu/lIaUUpRoFUsyaBZHQLbJ/3w1BMV1fZQoaAZoCWgPQwiXqx+b5EcDwJSGlFKUaBVLMmgWR0C2yeGK64DtdX2UKGgGaAloD0MIVKnZA61gCcCUhpRSlGgVSzJoFkdAtssaEHt4RnV9lChoBmgJaA9DCFIKur2k0QPAlIaUUpRoFUsyaBZHQLbK/EidJ8R1fZQoaAZoCWgPQwhU5uYb0X38v5SGlFKUaBVLMmgWR0C2yt18LKFJdX2UKGgGaAloD0MIAmTo2EFl9r+UhpRSlGgVSzJoFkdAtsq/SNOuaHV9lChoBmgJaA9DCCMxQQ3f4gbAlIaUUpRoFUsyaBZHQLbL2UfPomp1fZQoaAZoCWgPQwg1071O6ovzv5SGlFKUaBVLMmgWR0C2y7sX7+DOdX2UKGgGaAloD0MIXwoPml038b+UhpRSlGgVSzJoFkdAtsub1YhdMXV9lChoBmgJaA9DCMAGRIgrp/e/lIaUUpRoFUsyaBZHQLbLfWxhUip1fZQoaAZoCWgPQwjS+8bXnln2v5SGlFKUaBVLMmgWR0C2zGGtQsPKdX2UKGgGaAloD0MIVaLsLeW8+r+UhpRSlGgVSzJoFkdAtsxDk/8l5XV9lChoBmgJaA9DCE2/RLx1/vK/lIaUUpRoFUsyaBZHQLbMJFt8/lh1fZQoaAZoCWgPQwjRH5p5cu0FwJSGlFKUaBVLMmgWR0C2zAXn2ZiNdX2UKGgGaAloD0MInb0z2qoECMCUhpRSlGgVSzJoFkdAtszzqOcUd3V9lChoBmgJaA9DCNfCLLRz2vC/lIaUUpRoFUsyaBZHQLbM1W912aF1fZQoaAZoCWgPQwgsvMtFfEcCwJSGlFKUaBVLMmgWR0C2zLYe1a4ddX2UKGgGaAloD0MIuYjvxKw3BMCUhpRSlGgVSzJoFkdAtsyXu5SWJXV9lChoBmgJaA9DCBK/Yg0XuQXAlIaUUpRoFUsyaBZHQLbNc0hNdqt1fZQoaAZoCWgPQwhkPbX66ir7v5SGlFKUaBVLMmgWR0C2zVUr08NhdX2UKGgGaAloD0MIvhdftMfrCMCUhpRSlGgVSzJoFkdAts014KQaJnV9lChoBmgJaA9DCLh3DfrSGwPAlIaUUpRoFUsyaBZHQLbNF1CPZIx1fZQoaAZoCWgPQwhB9KRMamjwv5SGlFKUaBVLMmgWR0C2zfNNzr/sdX2UKGgGaAloD0MI0y07xD+s97+UhpRSlGgVSzJoFkdAts3VMQEpzHV9lChoBmgJaA9DCNy8cVKYdwPAlIaUUpRoFUsyaBZHQLbNtcuanaZ1fZQoaAZoCWgPQwj+D7BW7Zr6v5SGlFKUaBVLMmgWR0C2zZdVea8ZdX2UKGgGaAloD0MI9P4/Tphw9L+UhpRSlGgVSzJoFkdAts54F/x2CHV9lChoBmgJaA9DCLqilBCsqvG/lIaUUpRoFUsyaBZHQLbOWf4REnd1fZQoaAZoCWgPQwjWqfI9I9Hxv5SGlFKUaBVLMmgWR0C2zjrKA8SxdX2UKGgGaAloD0MIAg8MIHwoAsCUhpRSlGgVSzJoFkdAts4cYpDu0HV9lChoBmgJaA9DCDMV4pF4ufK/lIaUUpRoFUsyaBZHQLbPBQ6p5u91fZQoaAZoCWgPQwiCPLt868Pmv5SGlFKUaBVLMmgWR0C2zubXtjTbdX2UKGgGaAloD0MIETY8vVIW5b+UhpRSlGgVSzJoFkdAts7Hk/8l5XV9lChoBmgJaA9DCF8NUBpqFArAlIaUUpRoFUsyaBZHQLbOqTDfm9x1fZQoaAZoCWgPQwje40wTtl/0v5SGlFKUaBVLMmgWR0C2z4Pkq+ajdX2UKGgGaAloD0MIQ61p3nFK9L+UhpRSlGgVSzJoFkdAts9lvHcUNHV9lChoBmgJaA9DCPRsVn2u9vm/lIaUUpRoFUsyaBZHQLbPRmRvFWJ1fZQoaAZoCWgPQwhXzXNEvsvvv5SGlFKUaBVLMmgWR0C2zyfYJ3PidX2UKGgGaAloD0MI20/G+DB7+L+UhpRSlGgVSzJoFkdAttABnyup0nV9lChoBmgJaA9DCL1uERjrG/m/lIaUUpRoFUsyaBZHQLbP43vhIe51fZQoaAZoCWgPQwhvZvSj4RTqv5SGlFKUaBVLMmgWR0C2z8Q6ltTDdX2UKGgGaAloD0MItkqwOJx56L+UhpRSlGgVSzJoFkdAts+l0PpY93V9lChoBmgJaA9DCMO4G0RrBfe/lIaUUpRoFUsyaBZHQLbQg8b70nR1fZQoaAZoCWgPQwirzJTW39Lzv5SGlFKUaBVLMmgWR0C20GWipNsWdX2UKGgGaAloD0MIcM6I0t5g9r+UhpRSlGgVSzJoFkdAttBGXMQmNX
V9lChoBmgJaA9DCC7JAbuaPAHAlIaUUpRoFUsyaBZHQLbQJ+aBqbl1fZQoaAZoCWgPQwixhovc09X+v5SGlFKUaBVLMmgWR0C20QnwTdtVdX2UKGgGaAloD0MIozuInSl087+UhpRSlGgVSzJoFkdAttDryAhB7nV9lChoBmgJaA9DCHOgh9o2zP6/lIaUUpRoFUsyaBZHQLbQzH9FWn11fZQoaAZoCWgPQwj2DUxuFBn4v5SGlFKUaBVLMmgWR0C20K4Pf8/EdX2UKGgGaAloD0MIIy2VtyOc/b+UhpRSlGgVSzJoFkdAttGI8uBczXV9lChoBmgJaA9DCGGm7V9Zqfe/lIaUUpRoFUsyaBZHQLbRatwJgLJ1fZQoaAZoCWgPQwi0OjlDcUfyv5SGlFKUaBVLMmgWR0C20UuQ6p5vdX2UKGgGaAloD0MIamrZWl9k97+UhpRSlGgVSzJoFkdAttEs+qzZ6HV9lChoBmgJaA9DCIDxDBr65wHAlIaUUpRoFUsyaBZHQLbSBxZdOZd1fZQoaAZoCWgPQwgi3jr/dln5v5SGlFKUaBVLMmgWR0C20ejqjaf0dX2UKGgGaAloD0MIU5RL4xd+B8CUhpRSlGgVSzJoFkdAttHJwiqyW3V9lChoBmgJaA9DCAPv5NNj2+y/lIaUUpRoFUsyaBZHQLbRq0ALiMp1fZQoaAZoCWgPQwgW3uUivtP6v5SGlFKUaBVLMmgWR0C20otxVAAydX2UKGgGaAloD0MIZDvfT43X/b+UhpRSlGgVSzJoFkdAttJtRiw0O3V9lChoBmgJaA9DCFiNJayN8fC/lIaUUpRoFUsyaBZHQLbSTfh/Aj91fZQoaAZoCWgPQwir0asBSkMIwJSGlFKUaBVLMmgWR0C20i9+5OJtdX2UKGgGaAloD0MINEqX/iUp+r+UhpRSlGgVSzJoFkdAttMKKCQLeHV9lChoBmgJaA9DCEGADB07iADAlIaUUpRoFUsyaBZHQLbS6/dIoVp1fZQoaAZoCWgPQwiIR+Ll6XwBwJSGlFKUaBVLMmgWR0C20synpB5YdX2UKGgGaAloD0MI8S4X8Z0Y8r+UhpRSlGgVSzJoFkdAttKuKl54W3V9lChoBmgJaA9DCFouG53zMwXAlIaUUpRoFUsyaBZHQLbTjX8fmtB1fZQoaAZoCWgPQwjTakjcY6n7v5SGlFKUaBVLMmgWR0C2029W6shgdX2UKGgGaAloD0MIX7THC+nw8r+UhpRSlGgVSzJoFkdAttNQCvHLinV9lChoBmgJaA9DCNrlWx/W2wHAlIaUUpRoFUsyaBZHQLbTMbW3BpJ1fZQoaAZoCWgPQwjECrd8JOXyv5SGlFKUaBVLMmgWR0C21Algtvn9dX2UKGgGaAloD0MIBP9byY4N/b+UhpRSlGgVSzJoFkdAttPrLcKw6nV9lChoBmgJaA9DCJtY4Cu6Nf6/lIaUUpRoFUsyaBZHQLbTy8A7xNJ1fZQoaAZoCWgPQwiyaDo7GRwBwJSGlFKUaBVLMmgWR0C2061G9YfXdX2UKGgGaAloD0MISMK+nUQEBMCUhpRSlGgVSzJoFkdAttSG8pTdcnV9lChoBmgJaA9DCB/3rdaJSwvAlIaUUpRoFUsyaBZHQLbUaMjNY8x1fZQoaAZoCWgPQwhOYaWCiqr6v5SGlFKUaBVLMmgWR0C21ElhsqJ/dX2UKGgGaAloD0MInZs24zSE/b+UhpRSlGgVSzJoFkdAttQqyon8bnV9lChoBmgJaA9DCMNkqmBUUgPAlIaUUpRoFUsyaBZHQLbVA9c8klh1fZQoaAZoCWgPQwiMuWsJ+eAAwJSGlFKUaBVLMmgWR0C21OW1x82KdX2UKGgGaAloD0MIVmR0QBK2+b+UhpRSlGgVSzJoFkdAttTGfukUK3V9lChoBmgJaA9DCIaRXtTul/W/lIaUUpRoFUsyaBZHQLbUqBV+7UZ1ZS4="
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",
":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="
},
- "_n_updates": 
+ "_n_updates": 100000,
"n_steps": 5,
"gamma": 0.99,
"gae_lambda": 1.0,
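The changes in this file are the training counters and buffers: num_timesteps and _total_timesteps move to 2000000, start_time is refreshed, _last_obs and _last_original_obs hold the final environment snapshots, ep_info_buffer is repopulated, and _n_updates becomes 100000, consistent with 2,000,000 timesteps divided by (4 envs × 5 steps per update). A hedged sketch of how a run with these hyperparameters could be reproduced (the MultiInputPolicy alias, the panda-gym import, and the VecNormalize settings are standard-but-assumed details, not recorded in this commit):

```python
# Hedged reproduction sketch for the hyperparameters stored in a2c-PandaReachDense-v2/data.
import panda_gym  # registers PandaReachDense-v2 (assumed; Gym 0.21 per system_info.txt)
from stable_baselines3 import A2C
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.vec_env import VecNormalize

env = make_vec_env("PandaReachDense-v2", n_envs=4)        # "n_envs": 4
env = VecNormalize(env, norm_obs=True, norm_reward=True)  # settings assumed; produces vec_normalize.pkl

model = A2C(
    "MultiInputPolicy",      # resolves to MultiInputActorCriticPolicy for Dict observations
    env,
    learning_rate=0.0007,    # "learning_rate": 0.0007
    n_steps=5,               # "n_steps": 5
    gamma=0.99,              # "gamma": 0.99
    gae_lambda=1.0,          # "gae_lambda": 1.0
    verbose=1,               # "verbose": 1
)
model.learn(total_timesteps=2_000_000)  # "_total_timesteps": 2000000
model.save("a2c-PandaReachDense-v2")    # writes the zip updated in this commit
env.save("vec_normalize.pkl")           # VecNormalize statistics
```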
a2c-PandaReachDense-v2/policy.optimizer.pth CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
- oid sha256:
- size
+ oid sha256:edf5e86147bfedf122a655e944ad2921cc8970202ab53ba308ab207120e0e78e
+ size 44734
a2c-PandaReachDense-v2/policy.pth CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
- oid sha256:
- size
+ oid sha256:7d2d71de777dbde785ca5cf0a2a1f245313ed94afba2634f38e6d7f2ea0e0e58
+ size 46014
a2c-PandaReachDense-v2/system_info.txt CHANGED
@@ -2,6 +2,6 @@
- Python: 3.9.16
- Stable-Baselines3: 1.7.0
- PyTorch: 1.13.1+cu116
- - GPU Enabled: 
+ - GPU Enabled: True
- Numpy: 1.22.4
- Gym: 0.21.0
config.json CHANGED
@@ -1 +1 @@
-
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=", "__module__": "stable_baselines3.common.policies", "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7fc03217b040>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7fc032176e80>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVgQAAAAAAAAB9lCiMD29wdGltaXplcl9jbGFzc5SME3RvcmNoLm9wdGltLnJtc3Byb3CUjAdSTVNwcm9wlJOUjBBvcHRpbWl6ZXJfa3dhcmdzlH2UKIwFYWxwaGGURz/vrhR64UeujANlcHOURz7k+LWI42jxjAx3ZWlnaHRfZGVjYXmUSwB1dS4=", "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "observation_space": {":type:": "<class 'gym.spaces.dict.Dict'>", ":serialized:": 
"gAWVUgMAAAAAAACMD2d5bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwOZ3ltLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUaBCTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowGX3NoYXBllEsDhZSMA2xvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZSMAUOUdJRSlIwEaGlnaJRoHSiWDAAAAAAAAAAAACBBAAAgQQAAIEGUaBVLA4WUaCB0lFKUjA1ib3VuZGVkX2JlbG93lGgdKJYDAAAAAAAAAAEBAZRoEowCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZRoIHSUUpSMDWJvdW5kZWRfYWJvdmWUaB0olgMAAAAAAAAAAQEBlGgsSwOFlGggdJRSlIwKX25wX3JhbmRvbZROdWKMDGRlc2lyZWRfZ29hbJRoDSmBlH2UKGgQaBVoGEsDhZRoGmgdKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZRoIHSUUpRoI2gdKJYMAAAAAAAAAAAAIEEAACBBAAAgQZRoFUsDhZRoIHSUUpRoKGgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoMmgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoN051YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgVaBhLBoWUaBpoHSiWGAAAAAAAAAAAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBVLBoWUaCB0lFKUaCNoHSiWGAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEGUaBVLBoWUaCB0lFKUaChoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDJoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDdOdWJ1aBhOaBBOaDdOdWIu", "spaces": "OrderedDict([('achieved_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('desired_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('observation', Box([-10. -10. -10. -10. -10. -10.], [10. 10. 10. 10. 10. 10.], (6,), float32))])", "_shape": null, "dtype": null, "_np_random": null}, "action_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": "gAWVbQEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLA4WUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWDAAAAAAAAAAAAIC/AACAvwAAgL+UaApLA4WUjAFDlHSUUpSMBGhpZ2iUaBIolgwAAAAAAAAAAACAPwAAgD8AAIA/lGgKSwOFlGgVdJRSlIwNYm91bmRlZF9iZWxvd5RoEiiWAwAAAAAAAAABAQGUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLA4WUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYDAAAAAAAAAAEBAZRoIUsDhZRoFXSUUpSMCl9ucF9yYW5kb22UTnViLg==", "dtype": "float32", "_shape": [3], "low": "[-1. -1. -1.]", "high": "[1. 1. 
1.]", "bounded_below": "[ True True True]", "bounded_above": "[ True True True]", "_np_random": null}, "n_envs": 4, "num_timesteps": 1000000, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1678873970167874476, "learning_rate": 0.0007, "tensorboard_log": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/RvAGjbi6x4WUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="}, "_last_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAAZA3gPlyUhryBbA0/ZA3gPlyUhryBbA0/ZA3gPlyUhryBbA0/ZA3gPlyUhryBbA0/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAA7OewPMbgiD5Uuto/mSmRv46qgr+vTVy/az+Mv8Cmtb/6Hgq/av3mPizTJj28aPg9lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAABkDeA+XJSGvIFsDT+qFkm86crRuh8tALxkDeA+XJSGvIFsDT+qFkm86crRuh8tALxkDeA+XJSGvIFsDT+qFkm86crRuh8tALxkDeA+XJSGvIFsDT+qFkm86crRuh8tALyUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[ 0.43760216 -0.01642817 0.5524369 ]\n [ 0.43760216 -0.01642817 0.5524369 ]\n [ 0.43760216 -0.01642817 0.5524369 ]\n [ 0.43760216 -0.01642817 0.5524369 ]]", "desired_goal": "[[ 0.02159496 0.2673399 1.7088113 ]\n [-1.134082 -1.0208299 -0.86056036]\n [-1.0956854 -1.4191513 -0.53953516]\n [ 0.45115215 0.04072873 0.12129351]]", "observation": "[[ 0.43760216 -0.01642817 0.5524369 -0.01227347 -0.00160059 -0.00782326]\n [ 0.43760216 -0.01642817 0.5524369 -0.01227347 -0.00160059 -0.00782326]\n [ 0.43760216 -0.01642817 0.5524369 -0.01227347 -0.00160059 -0.00782326]\n [ 0.43760216 -0.01642817 0.5524369 -0.01227347 -0.00160059 -0.00782326]]"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAEBAQGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": 
"gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAALXXwPVjJ972x2x48Mf0NPs7Ld704g5A+YQ6HPf087T3rW3k88PIXvt/Ewz3Wkg89lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]", "desired_goal": "[[ 0.117411 -0.1209895 0.00969593]\n [ 0.13866116 -0.0604971 0.28225112]\n [ 0.06594539 0.11583898 0.01521967]\n [-0.14838767 0.09559035 0.03505214]]", "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"}, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMIZsBZSpYTDcCUhpRSlIwBbJRLMowBdJRHQKnuH1nuiN91fZQoaAZoCWgPQwhVh9wMN+ALwJSGlFKUaBVLMmgWR0Cp7ZVurIYFdX2UKGgGaAloD0MIibZj6q7MDMCUhpRSlGgVSzJoFkdAqe0P531SO3V9lChoBmgJaA9DCPYoXI/CxRXAlIaUUpRoFUsyaBZHQKnsl2X9itt1fZQoaAZoCWgPQwhDBBxCldoYwJSGlFKUaBVLMmgWR0Cp7zEDIRywdX2UKGgGaAloD0MIHEMAcOxZCMCUhpRSlGgVSzJoFkdAqe6m7+T/yXV9lChoBmgJaA9DCJF7urpjkQjAlIaUUpRoFUsyaBZHQKnuIZVGTcJ1fZQoaAZoCWgPQwiuKZDZWXQWwJSGlFKUaBVLMmgWR0Cp7ajZ13dLdX2UKGgGaAloD0MIrUuN0M9UDMCUhpRSlGgVSzJoFkdAqfBABJZntnV9lChoBmgJaA9DCOJ0kq0uJw7AlIaUUpRoFUsyaBZHQKnvteu3c591fZQoaAZoCWgPQwiM8zehEAEcwJSGlFKUaBVLMmgWR0Cp7zBcZ9/jdX2UKGgGaAloD0MI36gVpu8VBsCUhpRSlGgVSzJoFkdAqe63m9xp+XV9lChoBmgJaA9DCBFzSdV2wyDAlIaUUpRoFUsyaBZHQKnxTCojv/l1fZQoaAZoCWgPQwjcuTDSi6ocwJSGlFKUaBVLMmgWR0Cp8MI2wV0tdX2UKGgGaAloD0MIKjdRS3NrFMCUhpRSlGgVSzJoFkdAqfA8nmaH9HV9lChoBmgJaA9DCPYNTG4UKRTAlIaUUpRoFUsyaBZHQKnvw9V3ljp1fZQoaAZoCWgPQwjbMXVXdkEOwJSGlFKUaBVLMmgWR0Cp8lGXXyy2dX2UKGgGaAloD0MIKENVTKUf+7+UhpRSlGgVSzJoFkdAqfHHlfZ26nV9lChoBmgJaA9DCDigpSvYJhPAlIaUUpRoFUsyaBZHQKnxQkDZDiR1fZQoaAZoCWgPQwg3T3XIzbAMwJSGlFKUaBVLMmgWR0Cp8Ml6Z6UrdX2UKGgGaAloD0MIQfUPIhky/r+UhpRSlGgVSzJoFkdAqfNuee4Cp3V9lChoBmgJaA9DCAlP6PUnARHAlIaUUpRoFUsyaBZHQKny5E/jbSJ1fZQoaAZoCWgPQwgeUDblCk8WwJSGlFKUaBVLMmgWR0Cp8l7SApazdX2UKGgGaAloD0MIQfLOoQyVEcCUhpRSlGgVSzJoFkdAqfHl+ocaO3V9lChoBmgJaA9DCCQqVDcXDxjAlIaUUpRoFUsyaBZHQKn0gYsunMt1fZQoaAZoCWgPQwgrMGR1q3cUwJSGlFKUaBVLMmgWR0Cp8/efh/AkdX2UKGgGaAloD0MIfy+FB81+GsCUhpRSlGgVSzJoFkdAqfNyWcBltnV9lChoBmgJaA9DCOhNRSqMXR7AlIaUUpRoFUsyaBZHQKny+ZTAFgV1fZQoaAZoCWgPQwirsYS1MWYXwJSGlFKUaBVLMmgWR0Cp9Y4f4h2XdX2UKGgGaAloD0MIHy3OGObECcCUhpRSlGgVSzJoFkdAqfUEGmk30nV9lChoBmgJaA9DCCHp0yr6IwrAlIaUUpRoFUsyaBZHQKn0foePq9p1fZQoaAZoCWgPQwiakxeZgI8TwJSGlFKUaBVLMmgWR0Cp9AXVTaTPdX2UKGgGaAloD0MISphp+1eWCMCUhpRSlGgVSzJoFkdAqfaWW0JF9nV9lChoBmgJaA9DCFVq9kAr0AvAlIaUUpRoFUsyaBZHQKn2DFDOTq11fZQoaA
ZoCWgPQwhiLNMvEZ8RwJSGlFKUaBVLMmgWR0Cp9YbPY4ACdX2UKGgGaAloD0MIBHXKoxthBMCUhpRSlGgVSzJoFkdAqfUOAAhjfHV9lChoBmgJaA9DCJG1hlJ7gRnAlIaUUpRoFUsyaBZHQKn3pV1fVqh1fZQoaAZoCWgPQwjDSC9q94sGwJSGlFKUaBVLMmgWR0Cp9xtHhCMQdX2UKGgGaAloD0MI+PnvwWs3CMCUhpRSlGgVSzJoFkdAqfaVs54nnnV9lChoBmgJaA9DCByWBn5UYwXAlIaUUpRoFUsyaBZHQKn2HO8kD6p1fZQoaAZoCWgPQwgld9hEZu4FwJSGlFKUaBVLMmgWR0Cp+LMoDxLCdX2UKGgGaAloD0MIfQkVHF5QD8CUhpRSlGgVSzJoFkdAqfgpC0F8onV9lChoBmgJaA9DCC5XPzbJzxjAlIaUUpRoFUsyaBZHQKn3o36Q/5d1fZQoaAZoCWgPQwie76fGSxcTwJSGlFKUaBVLMmgWR0Cp9yqbjLjhdX2UKGgGaAloD0MIN8e5TbiHE8CUhpRSlGgVSzJoFkdAqfoHlhgE2nV9lChoBmgJaA9DCI4HW+z2aRPAlIaUUpRoFUsyaBZHQKn5fhUipvR1fZQoaAZoCWgPQwjPZ0C9GfUDwJSGlFKUaBVLMmgWR0Cp+Pmgi/widX2UKGgGaAloD0MIg1FJnYCGAcCUhpRSlGgVSzJoFkdAqfiBwVCXyHV9lChoBmgJaA9DCCSYamYtRQTAlIaUUpRoFUsyaBZHQKn7vn13+uN1fZQoaAZoCWgPQwiRYRVvZH4EwJSGlFKUaBVLMmgWR0Cp+zVvddmhdX2UKGgGaAloD0MI7MGk+PgEDcCUhpRSlGgVSzJoFkdAqfqwoRZlnXV9lChoBmgJaA9DCP7UeOkmgRnAlIaUUpRoFUsyaBZHQKn6OH0se4l1fZQoaAZoCWgPQwis5jki36UFwJSGlFKUaBVLMmgWR0Cp/WsK9f1IdX2UKGgGaAloD0MIYi6p2m6CB8CUhpRSlGgVSzJoFkdAqfzh+OOsDHV9lChoBmgJaA9DCAADQYAMfQDAlIaUUpRoFUsyaBZHQKn8XWSU1Q91fZQoaAZoCWgPQwhtHofB/PUOwJSGlFKUaBVLMmgWR0Cp++VOj7AMdX2UKGgGaAloD0MIMA+Z8iGICMCUhpRSlGgVSzJoFkdAqf8wwoLG73V9lChoBmgJaA9DCGufjscMdAjAlIaUUpRoFUsyaBZHQKn+p5TIeYF1fZQoaAZoCWgPQwgSwTi4dGwEwJSGlFKUaBVLMmgWR0Cp/iLo4dZJdX2UKGgGaAloD0MITimvldA9DcCUhpRSlGgVSzJoFkdAqf2q7wrlNnV9lChoBmgJaA9DCJkrg2qD0wLAlIaUUpRoFUsyaBZHQKoBCmXw9aF1fZQoaAZoCWgPQwjP+L64VCUWwJSGlFKUaBVLMmgWR0CqAIEv9LpSdX2UKGgGaAloD0MIzm4tk+HIEsCUhpRSlGgVSzJoFkdAqf/8qBmPHXV9lChoBmgJaA9DCJtZSwFp3wjAlIaUUpRoFUsyaBZHQKn/hV/+bVl1fZQoaAZoCWgPQwh63o0FhUELwJSGlFKUaBVLMmgWR0CqAtfhMrVfdX2UKGgGaAloD0MI8nwG1JvxA8CUhpRSlGgVSzJoFkdAqgJOt4iX6nV9lChoBmgJaA9DCCzwFd16TRHAlIaUUpRoFUsyaBZHQKoByjdHlOp1fZQoaAZoCWgPQwguy9dl+K8PwJSGlFKUaBVLMmgWR0CqAVJcxCY1dX2UKGgGaAloD0MIN8E3TZ99CsCUhpRSlGgVSzJoFkdAqgSq6vq1PXV9lChoBmgJaA9DCM2RlV8GowbAlIaUUpRoFUsyaBZHQKoEIcn3L3d1fZQoaAZoCWgPQwiESIYcW88NwJSGlFKUaBVLMmgWR0CqA51FhG6PdX2UKGgGaAloD0MI0HtjCABeHcCUhpRSlGgVSzJoFkdAqgMlbX6InHV9lChoBmgJaA9DCKGhf4KLJRnAlIaUUpRoFUsyaBZHQKoGOCpWFOB1fZQoaAZoCWgPQwiMS1Xa4voYwJSGlFKUaBVLMmgWR0CqBa5Wq95AdX2UKGgGaAloD0MI+OKL9nixIMCUhpRSlGgVSzJoFkdAqgUo0VJti3V9lChoBmgJaA9DCHhDGhU4WQjAlIaUUpRoFUsyaBZHQKoEsAMlTm51fZQoaAZoCWgPQwg6PITx01gFwJSGlFKUaBVLMmgWR0CqB0SIP9UCdX2UKGgGaAloD0MIeCXJc31fEMCUhpRSlGgVSzJoFkdAqga6hBZ6lnV9lChoBmgJaA9DCDiCVIodbQnAlIaUUpRoFUsyaBZHQKoGNPwd8zB1fZQoaAZoCWgPQwgg0Jm0qRoIwJSGlFKUaBVLMmgWR0CqBbwwj+rEdX2UKGgGaAloD0MIMCsU6X6+GsCUhpRSlGgVSzJoFkdAqghRwhnrZHV9lChoBmgJaA9DCKM6Hch6WhbAlIaUUpRoFUsyaBZHQKoHyDHOryV1fZQoaAZoCWgPQwgwEtpyLqUGwJSGlFKUaBVLMmgWR0CqB0NK7I1cdX2UKGgGaAloD0MIs82N6QlrEMCUhpRSlGgVSzJoFkdAqgbKsr/bTXV9lChoBmgJaA9DCKQ1Bp0Q6iDAlIaUUpRoFUsyaBZHQKoJXGipNsZ1fZQoaAZoCWgPQwjRksfT8mMPwJSGlFKUaBVLMmgWR0CqCNJ1zQu3dX2UKGgGaAloD0MIwt1Zu+0CEcCUhpRSlGgVSzJoFkdAqghNC5VfeHV9lChoBmgJaA9DCHo4gem0bgTAlIaUUpRoFUsyaBZHQKoH1F2mpER1fZQoaAZoCWgPQwhK0cq9wJwSwJSGlFKUaBVLMmgWR0CqCmjgIhQndX2UKGgGaAloD0MI+Z0mM95WEsCUhpRSlGgVSzJoFkdAqgnezMRpUXV9lChoBmgJaA9DCIzzN6EQoQPAlIaUUpRoFUsyaBZHQKoJWUrTYul1fZQoaAZoCWgPQwhVavZAK5AEwJSGlFKUaBVLMmgWR0CqCOCNbTttdX2UKGgGaAloD0MIj4tqEVFMCMCUhpRSlGgVSzJoFkdAqgtwAp8WsXV9lChoBmgJaA9DCFX3yOaq+RPAlIaUUpRoFUsyaBZHQKoK5eOXE611fZQoaAZoCWgPQwiMTMCvkXQZwJSGlFKUaBVLMmgWR0CqCmBqj8DTdX2UKGgGaAloD0MIejTVk/lnCsCUhpRSlGgVSzJoFkdAqgnnldTo+3V9lChoBmgJaA9DCFa2D3nLZRTAlIaUUpRoFUsyaBZHQKoMiQDmr811fZQoaAZoCWgPQwgOMsnIWQgSwJSGlFKUaBVLMmgWR0CqC/8IAwPAdX2UKGgGaAloD0MIltHI5xW/EsCUhpRSlGgVSzJoFkdAqgt5kEs8PnV9lChoBmgJaA9DCIih1ckZGhLAlIaUUpRoFUsyaBZHQKoLAMn7YTV1ZS4="}, "ep_success_buffer": {":type:": 
"<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 50000, "n_steps": 5, "gamma": 0.99, "gae_lambda": 1.0, "ent_coef": 0.0, "vf_coef": 0.5, "max_grad_norm": 0.5, "normalize_advantage": false, "system_info": {"OS": "Linux-5.10.147+-x86_64-with-glibc2.31 # 1 SMP Sat Dec 10 16:00:40 UTC 2022", "Python": "3.9.16", "Stable-Baselines3": "1.7.0", "PyTorch": "1.13.1+cu116", "GPU Enabled": "False", "Numpy": "1.22.4", "Gym": "0.21.0"}}
+
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=", "__module__": "stable_baselines3.common.policies", "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7f41bed125e0>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7f41bed0dfc0>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVgQAAAAAAAAB9lCiMD29wdGltaXplcl9jbGFzc5SME3RvcmNoLm9wdGltLnJtc3Byb3CUjAdSTVNwcm9wlJOUjBBvcHRpbWl6ZXJfa3dhcmdzlH2UKIwFYWxwaGGURz/vrhR64UeujANlcHOURz7k+LWI42jxjAx3ZWlnaHRfZGVjYXmUSwB1dS4=", "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "observation_space": {":type:": "<class 'gym.spaces.dict.Dict'>", ":serialized:": 
"gAWVUgMAAAAAAACMD2d5bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwOZ3ltLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUaBCTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowGX3NoYXBllEsDhZSMA2xvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZSMAUOUdJRSlIwEaGlnaJRoHSiWDAAAAAAAAAAAACBBAAAgQQAAIEGUaBVLA4WUaCB0lFKUjA1ib3VuZGVkX2JlbG93lGgdKJYDAAAAAAAAAAEBAZRoEowCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZRoIHSUUpSMDWJvdW5kZWRfYWJvdmWUaB0olgMAAAAAAAAAAQEBlGgsSwOFlGggdJRSlIwKX25wX3JhbmRvbZROdWKMDGRlc2lyZWRfZ29hbJRoDSmBlH2UKGgQaBVoGEsDhZRoGmgdKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFUsDhZRoIHSUUpRoI2gdKJYMAAAAAAAAAAAAIEEAACBBAAAgQZRoFUsDhZRoIHSUUpRoKGgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoMmgdKJYDAAAAAAAAAAEBAZRoLEsDhZRoIHSUUpRoN051YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgVaBhLBoWUaBpoHSiWGAAAAAAAAAAAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBVLBoWUaCB0lFKUaCNoHSiWGAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEGUaBVLBoWUaCB0lFKUaChoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDJoHSiWBgAAAAAAAAABAQEBAQGUaCxLBoWUaCB0lFKUaDdOdWJ1aBhOaBBOaDdOdWIu", "spaces": "OrderedDict([('achieved_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('desired_goal', Box([-10. -10. -10.], [10. 10. 10.], (3,), float32)), ('observation', Box([-10. -10. -10. -10. -10. -10.], [10. 10. 10. 10. 10. 10.], (6,), float32))])", "_shape": null, "dtype": null, "_np_random": null}, "action_space": {":type:": "<class 'gym.spaces.box.Box'>", ":serialized:": "gAWVbQEAAAAAAACMDmd5bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lGgFk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMBl9zaGFwZZRLA4WUjANsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWDAAAAAAAAAAAAIC/AACAvwAAgL+UaApLA4WUjAFDlHSUUpSMBGhpZ2iUaBIolgwAAAAAAAAAAACAPwAAgD8AAIA/lGgKSwOFlGgVdJRSlIwNYm91bmRlZF9iZWxvd5RoEiiWAwAAAAAAAAABAQGUaAeMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLA4WUaBV0lFKUjA1ib3VuZGVkX2Fib3ZllGgSKJYDAAAAAAAAAAEBAZRoIUsDhZRoFXSUUpSMCl9ucF9yYW5kb22UTnViLg==", "dtype": "float32", "_shape": [3], "low": "[-1. -1. -1.]", "high": "[1. 1. 
1.]", "bounded_below": "[ True True True]", "bounded_above": "[ True True True]", "_np_random": null}, "n_envs": 4, "num_timesteps": 2000000, "_total_timesteps": 2000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1678888376124291206, "learning_rate": 0.0007, "tensorboard_log": null, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVwwIAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZSMBGZ1bmOUS4JDAgABlIwDdmFslIWUKXSUUpR9lCiMC19fcGFja2FnZV9flIwYc3RhYmxlX2Jhc2VsaW5lczMuY29tbW9ulIwIX19uYW1lX1+UjB5zdGFibGVfYmFzZWxpbmVzMy5jb21tb24udXRpbHOUjAhfX2ZpbGVfX5SMSC91c3IvbG9jYWwvbGliL3B5dGhvbjMuOS9kaXN0LXBhY2thZ2VzL3N0YWJsZV9iYXNlbGluZXMzL2NvbW1vbi91dGlscy5weZR1Tk5oAIwQX21ha2VfZW1wdHlfY2VsbJSTlClSlIWUdJRSlIwcY2xvdWRwaWNrbGUuY2xvdWRwaWNrbGVfZmFzdJSMEl9mdW5jdGlvbl9zZXRzdGF0ZZSTlGgffZR9lChoFmgNjAxfX3F1YWxuYW1lX1+UjBljb25zdGFudF9mbi48bG9jYWxzPi5mdW5jlIwPX19hbm5vdGF0aW9uc19flH2UjA5fX2t3ZGVmYXVsdHNfX5ROjAxfX2RlZmF1bHRzX1+UTowKX19tb2R1bGVfX5RoF4wHX19kb2NfX5ROjAtfX2Nsb3N1cmVfX5RoAIwKX21ha2VfY2VsbJSTlEc/RvAGjbi6x4WUUpSFlIwXX2Nsb3VkcGlja2xlX3N1Ym1vZHVsZXOUXZSMC19fZ2xvYmFsc19flH2UdYaUhlIwLg=="}, "_last_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA5L/ePq04oLx+iQw/5L/ePq04oLx+iQw/5L/ePq04oLx+iQw/5L/ePq04oLx+iQw/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAm3XkPnsSDL/gx72/0rEJPTMZib8Jqwu+7gSiv+K4KT815QS+MkW7v8OVmD+eooK+lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADkv94+rTigvH6JDD9hvjQ83NOku0uyy7rkv94+rTigvH6JDD9hvjQ83NOku0uyy7rkv94+rTigvH6JDD9hvjQ83NOku0uyy7rkv94+rTigvH6JDD9hvjQ83NOku0uyy7qUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[ 0.43505776 -0.01955828 0.54897296]\n [ 0.43505776 -0.01955828 0.54897296]\n [ 0.43505776 -0.01955828 0.54897296]\n [ 0.43505776 -0.01955828 0.54897296]]", "desired_goal": "[[ 0.44620976 -0.547157 -1.4826622 ]\n [ 0.03361685 -1.0710815 -0.13639463]\n [-1.2657754 0.66297734 -0.1297806 ]\n [-1.4630492 1.1920704 -0.25514692]]", "observation": "[[ 0.43505776 -0.01955828 0.54897296 0.01103172 -0.00503014 -0.00155408]\n [ 0.43505776 -0.01955828 0.54897296 0.01103172 -0.00503014 -0.00155408]\n [ 0.43505776 -0.01955828 0.54897296 0.01103172 -0.00503014 -0.00155408]\n [ 0.43505776 -0.01955828 0.54897296 0.01103172 -0.00503014 -0.00155408]]"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAEBAQGUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": 
"gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAANXsbPdN3DT4kO0Y+7ly6PYbr8j3l2gI9EDK8PLlQ+L17IYc9eoyaPV6YR739MRE+lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]", "desired_goal": "[[ 0.0379593 0.1381524 0.19358498]\n [ 0.09099756 0.11861329 0.03194704]\n [ 0.02297309 -0.12124772 0.06598183]\n [ 0.07546325 -0.04872929 0.14179225]]", "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"}, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVHRAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpSMFW51bXB5LmNvcmUubXVsdGlhcnJheZSMBnNjYWxhcpSTlIwFbnVtcHmUjAVkdHlwZZSTlIwCZjiUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYkMI63JKQEyC+r+UhpRSlIwBbJRLMowBdJRHQLbHDXHzYmN1fZQoaAZoCWgPQwgJi4o4nWT4v5SGlFKUaBVLMmgWR0C2xu+k+HJtdX2UKGgGaAloD0MIflLt0/F4CMCUhpRSlGgVSzJoFkdAtsbQe3hGY3V9lChoBmgJaA9DCN1FmKJc2gfAlIaUUpRoFUsyaBZHQLbGskuHvc91fZQoaAZoCWgPQwh0X85sV+j2v5SGlFKUaBVLMmgWR0C2x9SR8twrdX2UKGgGaAloD0MICaUvhJz3AMCUhpRSlGgVSzJoFkdAtse2rmyPdXV9lChoBmgJaA9DCMKjjSPWggXAlIaUUpRoFUsyaBZHQLbHl7EYO2B1fZQoaAZoCWgPQwioABjPoOH1v5SGlFKUaBVLMmgWR0C2x3mLUCq7dX2UKGgGaAloD0MIFlCop4+A9L+UhpRSlGgVSzJoFkdAtsiUD/2kBXV9lChoBmgJaA9DCD4HliNkoP+/lIaUUpRoFUsyaBZHQLbIdiXY1511fZQoaAZoCWgPQwgOZhNgWH7zv5SGlFKUaBVLMmgWR0C2yFcPz4DcdX2UKGgGaAloD0MISs6JPbTP9L+UhpRSlGgVSzJoFkdAtsg4xREWqXV9lChoBmgJaA9DCMEcPX5v0/i/lIaUUpRoFUsyaBZHQLbJZUEPlMh1fZQoaAZoCWgPQwiMSX8vhQf0v5SGlFKUaBVLMmgWR0C2yUdpRGc4dX2UKGgGaAloD0MIK76h8Nl697+UhpRSlGgVSzJoFkdAtskocdYGMXV9lChoBmgJaA9DCKDFUiRfifW/lIaUUpRoFUsyaBZHQLbJCjwQUYd1fZQoaAZoCWgPQwi3lslwPN/zv5SGlFKUaBVLMmgWR0C2yjw7tAs1dX2UKGgGaAloD0MI8S2sG+9O9L+UhpRSlGgVSzJoFkdAtsoeenQ6ZHV9lChoBmgJaA9DCC7IluXrsuu/lIaUUpRoFUsyaBZHQLbJ/3w1BMV1fZQoaAZoCWgPQwiXqx+b5EcDwJSGlFKUaBVLMmgWR0C2yeGK64DtdX2UKGgGaAloD0MIVKnZA61gCcCUhpRSlGgVSzJoFkdAtssaEHt4RnV9lChoBmgJaA9DCFIKur2k0QPAlIaUUpRoFUsyaBZHQLbK/EidJ8R1fZQoaAZoCWgPQwhU5uYb0X38v5SGlFKUaBVLMmgWR0C2yt18LKFJdX2UKGgGaAloD0MIAmTo2EFl9r+UhpRSlGgVSzJoFkdAtsq/SNOuaHV9lChoBmgJaA9DCCMxQQ3f4gbAlIaUUpRoFUsyaBZHQLbL2UfPomp1fZQoaAZoCWgPQwg1071O6ovzv5SGlFKUaBVLMmgWR0C2y7sX7+DOdX2UKGgGaAloD0MIXwoPml038b+UhpRSlGgVSzJoFkdAtsub1YhdMXV9lChoBmgJaA9DCMAGRIgrp/e/lIaUUpRoFUsyaBZHQLbLfWxhUip1fZQoaAZoCWgPQwjS+8bXnln2v5SGlFKUaBVLMmgWR0C2zGGtQsPKdX2UKGgGaAloD0MIVaLsLeW8+r+UhpRSlGgVSzJoFkdAtsxDk/8l5XV9lChoBmgJaA9DCE2/RLx1/vK/lIaUUpRoFUsyaBZHQLbMJFt8/lh1fZQoaAZoCWgPQwjRH5p5cu0FwJSGlFKUaBVLMmgWR0C2zAXn2ZiNdX2UKGgGaAloD0MInb0z2qoECMCUhpRSlGgVSzJoFkdAtszzqOcUd3V9lChoBmgJaA9DCNfCLLRz2vC/lIaUUpRoFUsyaBZHQLbM1W912aF1fZQo
aAZoCWgPQwgsvMtFfEcCwJSGlFKUaBVLMmgWR0C2zLYe1a4ddX2UKGgGaAloD0MIuYjvxKw3BMCUhpRSlGgVSzJoFkdAtsyXu5SWJXV9lChoBmgJaA9DCBK/Yg0XuQXAlIaUUpRoFUsyaBZHQLbNc0hNdqt1fZQoaAZoCWgPQwhkPbX66ir7v5SGlFKUaBVLMmgWR0C2zVUr08NhdX2UKGgGaAloD0MIvhdftMfrCMCUhpRSlGgVSzJoFkdAts014KQaJnV9lChoBmgJaA9DCLh3DfrSGwPAlIaUUpRoFUsyaBZHQLbNF1CPZIx1fZQoaAZoCWgPQwhB9KRMamjwv5SGlFKUaBVLMmgWR0C2zfNNzr/sdX2UKGgGaAloD0MI0y07xD+s97+UhpRSlGgVSzJoFkdAts3VMQEpzHV9lChoBmgJaA9DCNy8cVKYdwPAlIaUUpRoFUsyaBZHQLbNtcuanaZ1fZQoaAZoCWgPQwj+D7BW7Zr6v5SGlFKUaBVLMmgWR0C2zZdVea8ZdX2UKGgGaAloD0MI9P4/Tphw9L+UhpRSlGgVSzJoFkdAts54F/x2CHV9lChoBmgJaA9DCLqilBCsqvG/lIaUUpRoFUsyaBZHQLbOWf4REnd1fZQoaAZoCWgPQwjWqfI9I9Hxv5SGlFKUaBVLMmgWR0C2zjrKA8SxdX2UKGgGaAloD0MIAg8MIHwoAsCUhpRSlGgVSzJoFkdAts4cYpDu0HV9lChoBmgJaA9DCDMV4pF4ufK/lIaUUpRoFUsyaBZHQLbPBQ6p5u91fZQoaAZoCWgPQwiCPLt868Pmv5SGlFKUaBVLMmgWR0C2zubXtjTbdX2UKGgGaAloD0MIETY8vVIW5b+UhpRSlGgVSzJoFkdAts7Hk/8l5XV9lChoBmgJaA9DCF8NUBpqFArAlIaUUpRoFUsyaBZHQLbOqTDfm9x1fZQoaAZoCWgPQwje40wTtl/0v5SGlFKUaBVLMmgWR0C2z4Pkq+ajdX2UKGgGaAloD0MIQ61p3nFK9L+UhpRSlGgVSzJoFkdAts9lvHcUNHV9lChoBmgJaA9DCPRsVn2u9vm/lIaUUpRoFUsyaBZHQLbPRmRvFWJ1fZQoaAZoCWgPQwhXzXNEvsvvv5SGlFKUaBVLMmgWR0C2zyfYJ3PidX2UKGgGaAloD0MI20/G+DB7+L+UhpRSlGgVSzJoFkdAttABnyup0nV9lChoBmgJaA9DCL1uERjrG/m/lIaUUpRoFUsyaBZHQLbP43vhIe51fZQoaAZoCWgPQwhvZvSj4RTqv5SGlFKUaBVLMmgWR0C2z8Q6ltTDdX2UKGgGaAloD0MItkqwOJx56L+UhpRSlGgVSzJoFkdAts+l0PpY93V9lChoBmgJaA9DCMO4G0RrBfe/lIaUUpRoFUsyaBZHQLbQg8b70nR1fZQoaAZoCWgPQwirzJTW39Lzv5SGlFKUaBVLMmgWR0C20GWipNsWdX2UKGgGaAloD0MIcM6I0t5g9r+UhpRSlGgVSzJoFkdAttBGXMQmNXV9lChoBmgJaA9DCC7JAbuaPAHAlIaUUpRoFUsyaBZHQLbQJ+aBqbl1fZQoaAZoCWgPQwixhovc09X+v5SGlFKUaBVLMmgWR0C20QnwTdtVdX2UKGgGaAloD0MIozuInSl087+UhpRSlGgVSzJoFkdAttDryAhB7nV9lChoBmgJaA9DCHOgh9o2zP6/lIaUUpRoFUsyaBZHQLbQzH9FWn11fZQoaAZoCWgPQwj2DUxuFBn4v5SGlFKUaBVLMmgWR0C20K4Pf8/EdX2UKGgGaAloD0MIIy2VtyOc/b+UhpRSlGgVSzJoFkdAttGI8uBczXV9lChoBmgJaA9DCGGm7V9Zqfe/lIaUUpRoFUsyaBZHQLbRatwJgLJ1fZQoaAZoCWgPQwi0OjlDcUfyv5SGlFKUaBVLMmgWR0C20UuQ6p5vdX2UKGgGaAloD0MIamrZWl9k97+UhpRSlGgVSzJoFkdAttEs+qzZ6HV9lChoBmgJaA9DCIDxDBr65wHAlIaUUpRoFUsyaBZHQLbSBxZdOZd1fZQoaAZoCWgPQwgi3jr/dln5v5SGlFKUaBVLMmgWR0C20ejqjaf0dX2UKGgGaAloD0MIU5RL4xd+B8CUhpRSlGgVSzJoFkdAttHJwiqyW3V9lChoBmgJaA9DCAPv5NNj2+y/lIaUUpRoFUsyaBZHQLbRq0ALiMp1fZQoaAZoCWgPQwgW3uUivtP6v5SGlFKUaBVLMmgWR0C20otxVAAydX2UKGgGaAloD0MIZDvfT43X/b+UhpRSlGgVSzJoFkdAttJtRiw0O3V9lChoBmgJaA9DCFiNJayN8fC/lIaUUpRoFUsyaBZHQLbSTfh/Aj91fZQoaAZoCWgPQwir0asBSkMIwJSGlFKUaBVLMmgWR0C20i9+5OJtdX2UKGgGaAloD0MINEqX/iUp+r+UhpRSlGgVSzJoFkdAttMKKCQLeHV9lChoBmgJaA9DCEGADB07iADAlIaUUpRoFUsyaBZHQLbS6/dIoVp1fZQoaAZoCWgPQwiIR+Ll6XwBwJSGlFKUaBVLMmgWR0C20synpB5YdX2UKGgGaAloD0MI8S4X8Z0Y8r+UhpRSlGgVSzJoFkdAttKuKl54W3V9lChoBmgJaA9DCFouG53zMwXAlIaUUpRoFUsyaBZHQLbTjX8fmtB1fZQoaAZoCWgPQwjTakjcY6n7v5SGlFKUaBVLMmgWR0C2029W6shgdX2UKGgGaAloD0MIX7THC+nw8r+UhpRSlGgVSzJoFkdAttNQCvHLinV9lChoBmgJaA9DCNrlWx/W2wHAlIaUUpRoFUsyaBZHQLbTMbW3BpJ1fZQoaAZoCWgPQwjECrd8JOXyv5SGlFKUaBVLMmgWR0C21Algtvn9dX2UKGgGaAloD0MIBP9byY4N/b+UhpRSlGgVSzJoFkdAttPrLcKw6nV9lChoBmgJaA9DCJtY4Cu6Nf6/lIaUUpRoFUsyaBZHQLbTy8A7xNJ1fZQoaAZoCWgPQwiyaDo7GRwBwJSGlFKUaBVLMmgWR0C2061G9YfXdX2UKGgGaAloD0MISMK+nUQEBMCUhpRSlGgVSzJoFkdAttSG8pTdcnV9lChoBmgJaA9DCB/3rdaJSwvAlIaUUpRoFUsyaBZHQLbUaMjNY8x1fZQoaAZoCWgPQwhOYaWCiqr6v5SGlFKUaBVLMmgWR0C21ElhsqJ/dX2UKGgGaAloD0MInZs24zSE/b+UhpRSlGgVSzJoFkdAttQqyon8bnV9lChoBmgJaA9DCMNkqmBUUgPAlIaUUpRoFUsyaBZHQLbVA9c8klh1fZQoaAZoCWgPQwiMuWsJ+eAAwJSGlFKUaBVLMmgWR0C21OW1x82KdX2UKGgGaAloD0MIVmR0QBK2+b+UhpRSlGgVSzJoFkdAttTGfukUK3V9lChoBmgJaA9DCIaRXtTul/W/lIaUUpRoFUsyaBZHQLbUqBV+7UZ1ZS4="}, "ep_success_buffer": 
{":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 100000, "n_steps": 5, "gamma": 0.99, "gae_lambda": 1.0, "ent_coef": 0.0, "vf_coef": 0.5, "max_grad_norm": 0.5, "normalize_advantage": false, "system_info": {"OS": "Linux-5.10.147+-x86_64-with-glibc2.31 # 1 SMP Sat Dec 10 16:00:40 UTC 2022", "Python": "3.9.16", "Stable-Baselines3": "1.7.0", "PyTorch": "1.13.1+cu116", "GPU Enabled": "True", "Numpy": "1.22.4", "Gym": "0.21.0"}}
replay.mp4 CHANGED
Binary files a/replay.mp4 and b/replay.mp4 differ
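Only the binary changed here. For reference, a hedged sketch of how such a replay clip could be regenerated from the saved model and normalization statistics (clip length, output folder, and wrapper ordering are assumptions; the original recording script is not part of this commit):

```python
# Hedged sketch: records a short rollout of the saved policy to an .mp4 file.
import panda_gym  # registers PandaReachDense-v2 (assumed)
from stable_baselines3 import A2C
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.vec_env import VecNormalize, VecVideoRecorder

env = make_vec_env("PandaReachDense-v2", n_envs=1)
env = VecNormalize.load("vec_normalize.pkl", env)  # reuse the stored normalization stats
env.training = False
env.norm_reward = False
env = VecVideoRecorder(
    env,
    video_folder="videos",                        # assumed output folder
    record_video_trigger=lambda step: step == 0,  # record from the first step
    video_length=500,                             # assumed clip length
)

model = A2C.load("a2c-PandaReachDense-v2.zip")
obs = env.reset()
for _ in range(500):
    action, _ = model.predict(obs, deterministic=True)
    obs, reward, done, info = env.step(action)
env.close()  # finalizes the video under videos/
```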
results.json CHANGED
@@ -1 +1 @@
- {"mean_reward": -
+ {"mean_reward": -1.9988439987413584, "std_reward": 0.561434414500327, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2023-03-15T15:32:16.917970"}
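These numbers back the README metric: mean_reward -1.9988... rounds to -2.00 and std_reward 0.5614... to 0.56, i.e. "-2.00 +/- 0.56" over 10 deterministic episodes. A hedged sketch of how such an evaluation is typically produced (the VecNormalize handling and the local file layout are assumptions; only the field names and episode count come from results.json):

```python
# Hedged evaluation sketch matching the fields stored in results.json.
import json
from datetime import datetime

import panda_gym  # registers PandaReachDense-v2 (assumed)
from stable_baselines3 import A2C
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.evaluation import evaluate_policy
from stable_baselines3.common.vec_env import VecNormalize

eval_env = make_vec_env("PandaReachDense-v2", n_envs=1)
eval_env = VecNormalize.load("vec_normalize.pkl", eval_env)
eval_env.training = False     # freeze the running statistics during evaluation
eval_env.norm_reward = False  # report raw, unnormalized rewards

model = A2C.load("a2c-PandaReachDense-v2.zip")
mean_reward, std_reward = evaluate_policy(model, eval_env, n_eval_episodes=10, deterministic=True)

with open("results.json", "w") as f:
    json.dump(
        {
            "mean_reward": mean_reward,
            "std_reward": std_reward,
            "is_deterministic": True,
            "n_eval_episodes": 10,
            "eval_datetime": datetime.now().isoformat(),
        },
        f,
    )
```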
vec_normalize.pkl CHANGED
@@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
- oid sha256:
+ oid sha256:ff4e78bfe0bb25dd94ce6bf6ba30bf45cf3e918c0789ab568efd454b738a4bcf
size 3056