kasperchen committed
Commit 3e340ed
1 Parent(s): 174ad08
Initial commit

Browse files
- README.md +1 -1
- a2c-PandaReachDense-v3.zip +2 -2
- a2c-PandaReachDense-v3/data +11 -11
- a2c-PandaReachDense-v3/policy.optimizer.pth +1 -1
- a2c-PandaReachDense-v3/policy.pth +1 -1
- a2c-PandaReachDense-v3/system_info.txt +1 -0
- config.json +1 -1
- replay.mp4 +0 -0
- results.json +1 -1
- vec_normalize.pkl +2 -2
README.md CHANGED
@@ -16,7 +16,7 @@ model-index:
       type: PandaReachDense-v3
     metrics:
     - type: mean_reward
-      value: -0.
+      value: -0.18 +/- 0.12
       name: mean_reward
       verified: false
 ---
a2c-PandaReachDense-v3.zip CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:13e3c054336ca99b1e7721147e09a9cdb10cfd4e1f70d24df535c9adc8904add
+size 106936
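The three lines above are a Git LFS pointer: a spec-version line, an `oid sha256:<hash>` line identifying the blob, and a `size <bytes>` line. As a hedged illustration only (the helper name and file paths below are hypothetical and not part of this repository), a downloaded blob can be checked against such a pointer like this:

```python
import hashlib
import os

def verify_lfs_pointer(pointer_path, blob_path):
    """Check a downloaded blob against the oid/size recorded in a Git LFS pointer file."""
    fields = {}
    with open(pointer_path, "r", encoding="utf-8") as fh:
        for line in fh:
            key, _, value = line.strip().partition(" ")
            fields[key] = value

    expected_oid = fields["oid"].partition("sha256:")[2]
    expected_size = int(fields["size"])

    digest = hashlib.sha256()
    with open(blob_path, "rb") as fh:
        for chunk in iter(lambda: fh.read(1 << 20), b""):
            digest.update(chunk)

    return digest.hexdigest() == expected_oid and os.path.getsize(blob_path) == expected_size

# Hypothetical usage: pointer file as stored in git, blob as fetched from LFS storage.
# print(verify_lfs_pointer("a2c-PandaReachDense-v3.zip.pointer", "a2c-PandaReachDense-v3.zip"))
```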
a2c-PandaReachDense-v3/data CHANGED

Old version:
@@ -4,9 +4,9 @@
":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
"__init__": "<function MultiInputActorCriticPolicy.__init__ at
"__abstractmethods__": "frozenset()",
"_abc_impl": "<_abc._abc_data object at
},
"verbose": 1,
"policy_kwargs": {
@@ -24,25 +24,25 @@
"_num_timesteps_at_start": 0,
"seed": null,
"action_noise": null,
"start_time":
"learning_rate": 0.0007,
"tensorboard_log": null,
"_last_obs": {
":type:": "<class 'collections.OrderedDict'>",
":serialized:": "
"achieved_goal": "[[
"desired_goal": "[[
"observation": "[[
},
"_last_episode_starts": {
":type:": "<class 'numpy.ndarray'>",
":serialized:": "
},
"_last_original_obs": {
":type:": "<class 'collections.OrderedDict'>",
":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////
"achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]",
"desired_goal": "[[ 0.
"observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"
},
"_episode_num": 0,
@@ -52,7 +52,7 @@
"_stats_window_size": 100,
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
":serialized:": "
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",

New version:
@@ -4,9 +4,9 @@
":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=",
"__module__": "stable_baselines3.common.policies",
"__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ",
"__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7f0e86b41bd0>",
"__abstractmethods__": "frozenset()",
"_abc_impl": "<_abc._abc_data object at 0x7f0e86b484c0>"
},
"verbose": 1,
"policy_kwargs": {
@@ -24,25 +24,25 @@
"_num_timesteps_at_start": 0,
"seed": null,
"action_noise": null,
"start_time": 1692698341529201901,
"learning_rate": 0.0007,
"tensorboard_log": null,
"_last_obs": {
":type:": "<class 'collections.OrderedDict'>",
":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA5lCOPkdiM7zEyOc+5lCOPkdiM7zEyOc+3/RSPxKC8D4nPcw+Rjw9P7w0jz/6LKO/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAwQjNPxLFG76Pw2a/+q5fP7FONL60FSS/lyGoP0EWkD8JgdI+A+RuP7xGEz+rw4+/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADmUI4+R2IzvMTI5z5SmPw+A3d0u5Xcyj7mUI4+R2IzvMTI5z5SmPw+A3d0u5Xcyj7f9FI/EoLwPic9zD7wUpg/5OfOPxGxOz5GPD0/vDSPP/oso7/zVCg/lJdwPi3axL+UaA5LBEsGhpRoEnSUUpR1Lg==",
"achieved_goal": "[[ 0.27796096 -0.01094872 0.4527036 ]\n [ 0.27796096 -0.01094872 0.4527036 ]\n [ 0.82404894 0.46974236 0.39890406]\n [ 0.73920095 1.1187968 -1.2748101 ]]",
"desired_goal": "[[ 1.6018296 -0.15211895 -0.9014215 ]\n [ 0.8737637 -0.17608143 -0.64095616]\n [ 1.3135251 1.1256791 0.4111407 ]\n [ 0.9331667 0.5752981 -1.1231588 ]]",
"observation": "[[ 0.27796096 -0.01094872 0.4527036 0.4933496 -0.00373024 0.39621416]\n [ 0.27796096 -0.01094872 0.4527036 0.4933496 -0.00373024 0.39621416]\n [ 0.82404894 0.46974236 0.39890406 1.190031 1.6164517 0.18329264]\n [ 0.73920095 1.1187968 -1.2748101 0.6575462 0.23495322 -1.5379082 ]]"
},
"_last_episode_starts": {
":type:": "<class 'numpy.ndarray'>",
":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAEBAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="
},
"_last_original_obs": {
":type:": "<class 'collections.OrderedDict'>",
":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAA1+KPDGH7bzERqU9we2FvQAyPb34iZQ+qY8XvoUl/z1I6I0+Yw0NPnXmEL6Ms6Y9lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==",
"achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]",
"desired_goal": "[[ 0.01689101 -0.02899513 0.08070138]\n [-0.06539489 -0.04619026 0.29011512]\n [-0.14800896 0.12458328 0.2771628 ]\n [ 0.13774638 -0.14150412 0.08139715]]",
"observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"
},
"_episode_num": 0,
@@ -52,7 +52,7 @@
"_stats_window_size": 100,
"ep_info_buffer": {
":type:": "<class 'collections.deque'>",
":serialized:": "gAWV4AsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHv85ylvZRKpWMAWyUSwOMAXSUR0CbmrvOQhfTdX2UKGgGR7/CFYdQwblzaAdLAmgIR0CbmlCb+cYqdX2UKGgGR7/G2Dxsl9jPaAdLA2gIR0CbmeT4cm0FdX2UKGgGR7/HaxHG0eEJaAdLA2gIR0CbmYWUbDMvdX2UKGgGR7+535eqrBCVaAdLAmgIR0Cbmlxb0OEvdX2UKGgGR7/KTKT0QK8daAdLA2gIR0CbmtDbah6CdX2UKGgGR7/SjjaPCEYgaAdLA2gIR0CbmfmQr+YMdX2UKGgGR7/CCNCJGe+VaAdLAmgIR0CbmZS8an76dX2UKGgGR7+3PJJXhfjTaAdLAmgIR0CbmmuDBdledX2UKGgGR7/DEkSmIj4YaAdLAmgIR0Cbmtx7zCk5dX2UKGgGR7+0/KQq7ROUaAdLAmgIR0CbmgUYbbUPdX2UKGgGR7/Ldi2Dxsl+aAdLA2gIR0CbmaXRPXTWdX2UKGgGR7+/6j3225QQaAdLAmgIR0CbmugkTpPidX2UKGgGR7/SDwH7gsK9aAdLA2gIR0Cbmnzr/sE8dX2UKGgGR7+pDb8FY+0PaAdLAWgIR0CbmavTPSlWdX2UKGgGR7/SpnHvMKTjaAdLA2gIR0CbmhnM+u/2dX2UKGgGR7/I717IDHOsaAdLA2gIR0Cbmb+M6zVudX2UKGgGR7/d6Rhc7hegaAdLBGgIR0CbmwHck+otdX2UKGgGR7/cQla8pTddaAdLBGgIR0CbmpbvPToddX2UKGgGR7/Li2Dxsl9jaAdLA2gIR0CbmisZ5zHTdX2UKGgGR7+xv863iJfqaAdLAmgIR0CbmcujASFodX2UKGgGR7/CYAsCkoF3aAdLAmgIR0CbmxDzRQaadX2UKGgGR7+9vfj0cwQEaAdLAmgIR0CbmqYISlFddX2UKGgGR7/JeY2Kl54XaAdLA2gIR0Cbmj9TP0I1dX2UKGgGR7/EZ+hGpda/aAdLAmgIR0CbmdpeNT99dX2UKGgGR7/UJYDDCP6saAdLA2gIR0CbmyIxQBPsdX2UKGgGR7/SWwNb1RLsaAdLA2gIR0Cbmrb3XZoPdX2UKGgGR7/VoZQ53kgfaAdLA2gIR0CbmlEdNnGsdX2UKGgGR7+5F8XvYvnKaAdLAmgIR0CbmzHKfWc0dX2UKGgGR7/XaRISUTtcaAdLBGgIR0CbmfZxJd0JdX2UKGgGR7/Ig7HQyAQQaAdLA2gIR0Cbms08NhE0dX2UKGgGR7/BKDCgsbvPaAdLAmgIR0Cbmz5mRNh3dX2UKGgGR7/KUahpQDV6aAdLA2gIR0CbmmdznzQNdX2UKGgGR7/NUtqYZ2pyaAdLA2gIR0CbmggZCOWCdX2UKGgGR7/EdhiLEUCaaAdLA2gIR0Cbmt7dBSk1dX2UKGgGR7+5vAGjbi6yaAdLAmgIR0CbmnMPjGT+dX2UKGgGR7/Rzt1IRRMwaAdLA2gIR0Cbm1OafBepdX2UKGgGR7/MBMi8nNPhaAdLA2gIR0CbmhzsyBTXdX2UKGgGR7/A6T4cm0E6aAdLAmgIR0Cbm18UmD15dX2UKGgGR7/VyZ8a4tpVaAdLA2gIR0CbmoezD4xldX2UKGgGR7/c5WilBQenaAdLBGgIR0CbmvmIj4YadX2UKGgGR7+pltj0+TvBaAdLAWgIR0Cbmv72criEdX2UKGgGR7/Ns2NvOyE+aAdLA2gIR0Cbm3MA3kxRdX2UKGgGR7/OrZrYXfqHaAdLA2gIR0CbmpvkRzzVdX2UKGgGR7/YbJfYzzmPaAdLBGgIR0CbmjcFhXr/dX2UKGgGR7+ZXMhX8wYcaAdLAWgIR0Cbm3lJpWWAdX2UKGgGR7+7DiwSrYGuaAdLAmgIR0Cbmw3225QQdX2UKGgGR7++ylenhsInaAdLAmgIR0CbmkJ9RaX8dX2UKGgGR7/PdC3PRiPRaAdLA2gIR0Cbmq2exwAEdX2UKGgGR7/FeXzDn/1haAdLA2gIR0Cbm4qmTC+DdX2UKGgGR7/LXDFZPl+3aAdLA2gIR0Cbmx9+PRzBdX2UKGgGR7+91HOKO1fFaAdLAmgIR0Cbmk57w8W9dX2UKGgGR7+mH31zySV4aAdLAWgIR0CbmyiItUXIdX2UKGgGR7+ySU1Q66reaAdLAmgIR0Cbm5mhdt2tdX2UKGgGR7/IHGjsUqQSaAdLA2gIR0CbmsJ+lTFVdX2UKGgGR7/Ro2GZeAuqaAdLA2gIR0Cbmzn5BTn8dX2UKGgGR7/Gy1NQCSzPaAdLAmgIR0Cbms5eJHiFdX2UKGgGR7/ZD9wWFev7aAdLBGgIR0CbmmnE2pAEdX2UKGgGR7/J0cOskpqiaAdLA2gIR0Cbm6wM6RyPdX2UKGgGR7/D1bqyGBWgaAdLAmgIR0Cbm0kTpPhydX2UKGgGR7+2Ll3hXKbKaAdLAmgIR0Cbm7oRqXWwdX2UKGgGR7/Lw6ySmqHXaAdLA2gIR0CbmuLEk0JodX2UKGgGR7/YA9mpVCHAaAdLBGgIR0CbmoOyVv/BdX2UKGgGR7/UkNFz+3pfaAdLA2gIR0Cbm8tPHktFdX2UKGgGR7/XZ1FH8TBZaAdLBGgIR0Cbm2BU70WedX2UKGgGR7/U3iJfpljFaAdLBGgIR0CbmvzOHFgldX2UKGgGR7/KQJ5VwPy1aAdLA2gIR0Cbmpfq5byIdX2UKGgGR7/DmSyMUAT7aAdLAmgIR0Cbm26Ww/xEdX2UKGgGR7/UkPczqKP5aAdLA2gIR0Cbm9/HYHxCdX2UKGgGR7/Otrbg0j1PaAdLA2gIR0Cbmw3DNyHVdX2UKGgGR7+0YxcmjTKDaAdLAmgIR0Cbm+rVOKwZdX2UKGgGR7/UqZc9nscAaAdLBGgIR0Cbm4hXbM5fdX2UKGgGR7+/dXT3IuGsaAdLAmgIR0Cbm/nL7oB8dX2UKGgGR7/Rf5ULlV94aAdLA2gIR0CbmyJ4B3iadX2UKGgGR7/Q1twaR6njaAdLA2gIR0Cbm5kkrwvydX2UKGgGR7/jTAWSEDhcaAdLCGgIR0Cbmsg2ZRbbdX2UKGgGR7/HWBBiTdLyaAdLA2gIR0CbmzO4oZyddX2UKGgGR7/Y5O8CgbqAaAdLBGgIR0CbnBRs/IKddX2UKGgGR7/Ce5Fw1ivxaAdLAmgIR0Cbm6lSCOFQdX2UKGgGR7/LTjNpudf+aAdLA2gIR0Cbmt3HJcPfdX2UKGgGR7+wvalDWsijaAdLAmgIR0Cbm7SWqtHQdX2UKGgGR7/N6hQFcIJJaAdLA2gIR0Cbm0i0OVgQdX2UKGgGR7/FHvMKTjebaAdLA2gIR0CbnCZJTVDsdX2UKGgGR7+074i5d4VzaAdLAmgIR0CbnDQnx8UmdX2UKGgGR7/UG8VYZEUkaAdLA2gIR0Cbm8lGgBcSdX2UKGgGR7/IWiUPhAGCaAdLA2gIR0Cbm115jYqYdX2UKGgGR7/W752yLQ5WaAdLBGgIR0Cbmv
i0v4/NdX2UKGgGR7+yLXL/0dzXaAdLAmgIR0CbnEAwPAfudX2UKGgGR7/Lnh86V+qjaAdLA2gIR0Cbm9ogV45cdX2UKGgGR7/SrjHXEqDsaAdLA2gIR0Cbm2446wMZdX2UKGgGR7+URaouPFNtaAdLAWgIR0Cbm+AJ9iMHdX2UKGgGR7+X0btJFspHaAdLAWgIR0Cbm3QVbiZOdX2UKGgGR7/RrgwXZXdTaAdLBGgIR0Cbmw+AEt/XdX2UKGgGR7/NrHEMspXqaAdLA2gIR0CbnFTz/ZM+dX2UKGgGR7+lYjjaPCEYaAdLAWgIR0Cbm+nO0LMLdX2UKGgGR7+og1WKdhAoaAdLAWgIR0CbmxjIJZ4fdX2UKGgGR7++PRzBAOawaAdLAmgIR0CbmyS3LFGYdX2UKGgGR7/hvM0P6KtQaAdLBGgIR0Cbm4/VRUFTdX2UKGgGR7/Zzu4PPLPlaAdLBGgIR0CbnG1ndweedX2UKGgGR7/aOFxn3+MqaAdLBGgIR0CbnAJmdy1edWUu"
},
"ep_success_buffer": {
":type:": "<class 'collections.deque'>",
a2c-PandaReachDense-v3/policy.optimizer.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:ab72f2d0970f3b8c4a68e51bf0d90c44b665b393b8d43f2cbe955f66b285cdc1
 size 44734
a2c-PandaReachDense-v3/policy.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:fd1aac3be256127a879c0049ca0286d86d723925575c0a320c706a16958f31a2
 size 46014
a2c-PandaReachDense-v3/system_info.txt CHANGED
@@ -6,3 +6,4 @@
 - Numpy: 1.25.2
 - Cloudpickle: 2.2.1
 - Gymnasium: 0.29.0
+- OpenAI Gym: 0.26.2
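The only change here is the extra OpenAI Gym line; the rest of the environment report is unchanged. As a hedged aside, Stable-Baselines3 ships a helper that collects this kind of report, and a file like system_info.txt is presumably produced with something along these lines (the exact function location and signature are an assumption; check your SB3 version):

```python
# Assumption: stable_baselines3.common.utils.get_system_info exists with this signature
# and returns (dict of versions, formatted string); verify against your SB3 version.
from stable_baselines3.common.utils import get_system_info

env_info, env_info_str = get_system_info(print_info=False)
with open("system_info.txt", "w", encoding="utf-8") as fh:
    fh.write(env_info_str)
```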
config.json CHANGED
@@ -1 +1 @@
Old version (line 1):
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=", "__module__": "stable_baselines3.common.policies", "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7fb5c03425f0>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7fb5c033ab40>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVgQAAAAAAAAB9lCiMD29wdGltaXplcl9jbGFzc5SME3RvcmNoLm9wdGltLnJtc3Byb3CUjAdSTVNwcm9wlJOUjBBvcHRpbWl6ZXJfa3dhcmdzlH2UKIwFYWxwaGGURz/vrhR64UeujANlcHOURz7k+LWI42jxjAx3ZWlnaHRfZGVjYXmUSwB1dS4=", "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "num_timesteps": 1000000, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1692612523625942672, "learning_rate": 0.0007, "tensorboard_log": null, "_last_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAArhMWvyiq5754hrI+4NSFPjGOWjnR9uI+4NSFPjGOWjnR9uI+lyEUvwus1z4nj68+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAd7a/v0foWb9JMY8/yU+jPuMJgj/TNLq/DPPbPX+HnT+8Ch2/dRaQv6eCSj/TiGw/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAACuExa/KKrnvniGsj7Yt1m/aefPv91CZT/g1IU+MY5aOdH24j6MTfo+GFPjOhlTxj7g1IU+MY5aOdH24j6MTfo+GFPjOhlTxj6XIRS/C6zXPiePrz6An0m/+cDRPxP+YT+UaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[-5.8623779e-01 -4.5247006e-01 3.4868217e-01]\n [ 2.6138973e-01 2.0843071e-04 4.4328931e-01]\n [ 2.6138973e-01 2.0843071e-04 
4.4328931e-01]\n [-5.7863754e-01 4.2123446e-01 3.4288904e-01]]", "desired_goal": "[[-1.4977559 -0.8512005 1.1186916 ]\n [ 0.3189681 1.0159267 -1.4547371 ]\n [ 0.10739717 1.2306975 -0.61344504]\n [-1.1256853 0.7910561 0.9239628 ]]", "observation": "[[-5.8623779e-01 -4.5247006e-01 3.4868217e-01 -8.5046148e-01\n -1.6242496e+00 8.9555150e-01]\n [ 2.6138973e-01 2.0843071e-04 4.4328931e-01 4.8887289e-01\n 1.7343489e-03 3.8735273e-01]\n [ 2.6138973e-01 2.0843071e-04 4.4328931e-01 4.8887289e-01\n 1.7343489e-03 3.8735273e-01]\n [-5.7863754e-01 4.2123446e-01 3.4288904e-01 -7.8759003e-01\n 1.6387016e+00 8.8278311e-01]]"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAABAQCUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAi4rVPf2iWD28uK49DlATPnPlSLskzx4+Utk0vCK+WDz+62s+xNxKPDm6gTxvsl0+lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]", "desired_goal": "[[ 0.10426816 0.05288981 0.08531329]\n [ 0.14386007 -0.00306543 0.15508705]\n [-0.01103814 0.01322892 0.23039243]\n [ 0.01238174 0.01583587 0.21650098]]", "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"}, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": 
"gAWV4AsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHv9OsIVuaWoqMAWyUSwOMAXSUR0CfoP6Kcd5qdX2UKGgGR7/QU6gdwNsnaAdLA2gIR0CfoJzcRDkVdX2UKGgGR7++YOUdJaq0aAdLAmgIR0Cfodg5BC2MdX2UKGgGR7+3Sc9W6shgaAdLAmgIR0CfoXWKuSwGdX2UKGgGR7+3xsl9jPOZaAdLAmgIR0CfoK4zJp35dX2UKGgGR7+yu4gA6uGLaAdLAmgIR0CfoeX8O09hdX2UKGgGR7/Xs2NvOyE+aAdLBGgIR0CfoR2L5ylvdX2UKGgGR7/Jgtvn8sMBaAdLA2gIR0CfoMJ5E+gUdX2UKGgGR7/Y1dgOSW7faAdLBGgIR0CfoZCf6Gg0dX2UKGgGR7/NVXFLnLaFaAdLA2gIR0Cfof2606YFdX2UKGgGR7+ZJTVDrqt6aAdLAWgIR0CfoZtjkMkQdX2UKGgGR7/R7DEWIoE0aAdLBGgIR0CfoTyC4BmxdX2UKGgGR7/IdyT6i0v5aAdLA2gIR0CfoNrVvuPWdX2UKGgGR7/MlqJuVHFxaAdLA2gIR0CfohKTjebedX2UKGgGR7/IOYplSS/1aAdLA2gIR0Cfoa/RVp9JdX2UKGgGR7+6JdjXnQpnaAdLAmgIR0CfoUphWo3rdX2UKGgGR7/AgWac7QsxaAdLAmgIR0CfoiOUdJardX2UKGgGR7/EPK+zt1IRaAdLAmgIR0CfoVtITXardX2UKGgGR7/Vz19ORDCxaAdLBGgIR0CfoPnfl6qsdX2UKGgGR7/L60pmVZ9vaAdLA2gIR0CfochDgIhRdX2UKGgGR7+792ovSMLnaAdLAmgIR0CfoQaoMrmRdX2UKGgGR7/MokzGgi/xaAdLA2gIR0Cfojfb9If9dX2UKGgGR7/ZhXbM5fdAaAdLBGgIR0CfoXVmBe5XdX2UKGgGR7+3BsQ/X5FgaAdLAmgIR0CfoRO7g88tdX2UKGgGR7/Z+R5kbxViaAdLBGgIR0CfoeYqG1x9dX2UKGgGR7/OFdLQHAymaAdLA2gIR0Cfok+GXXyzdX2UKGgGR7+8dXDFZPl/aAdLAmgIR0CfoST8HfMwdX2UKGgGR7/All9Sde6aaAdLAmgIR0CfofNZ/0/XdX2UKGgGR7/VkgwGnn+yaAdLA2gIR0CfoY3iJfpmdX2UKGgGR7/Bw84gieNDaAdLAmgIR0Cfol0/nnuBdX2UKGgGR7+k3CKrJbMYaAdLAWgIR0CfofqHoHLSdX2UKGgGR7+/PY4ACGN8aAdLAmgIR0CfoZuG9HtndX2UKGgGR7/ZdnTRYzSDaAdLBGgIR0CfoUP+GXXzdX2UKGgGR7/H3yqdYnv2aAdLA2gIR0CfonT+ee4DdX2UKGgGR7/Jl05lvqC6aAdLA2gIR0CfohJRwZO0dX2UKGgGR7/LiIcinpB5aAdLA2gIR0CfobVhTfixdX2UKGgGR7/F9lVcUucuaAdLAmgIR0CfoVOwPiDNdX2UKGgGR7+3MdLg4wRHaAdLAmgIR0CfoiG3nZCfdX2UKGgGR7/ShJAdGRV7aAdLA2gIR0CfoorTpgTidX2UKGgGR7+2W4Vh1DBuaAdLAmgIR0CfoWBtk4FSdX2UKGgGR7/LoIOYplSTaAdLA2gIR0CfocyrPt2LdX2UKGgGR7/Lpfx+az/qaAdLA2gIR0CfojkAPuohdX2UKGgGR7/QfZ26kIomaAdLA2gIR0CfoqKDkELZdX2UKGgGR7/PXYDklu3uaAdLA2gIR0CfoXghbGFSdX2UKGgGR7/AOJcgQpWnaAdLAmgIR0CfokZmqYJFdX2UKGgGR7/Tqp97WuoxaAdLA2gIR0CfoeE9+w1SdX2UKGgGR7/D7w8W9DhMaAdLA2gIR0Cforot+TePdX2UKGgGR7/L7/GVAzHkaAdLA2gIR0CfoY/Y8Md+dX2UKGgGR7/QhmGucMEzaAdLA2gIR0Cfol4mCyyEdX2UKGgGR7/SkSmIj4YaaAdLA2gIR0Cfofi9Zid8dX2UKGgGR7/QQ+UyHmA9aAdLA2gIR0Cfos4BFNL2dX2UKGgGR7/TQdjoZAIIaAdLA2gIR0CfoaRpDeCTdX2UKGgGR7/JmSyMUAT7aAdLA2gIR0CfonKu0TlDdX2UKGgGR7/ATwDvE0iyaAdLAmgIR0Cfot/etSyddX2UKGgGR7/YbutwJgLJaAdLBGgIR0Cfohd2xIJ7dX2UKGgGR7+nqxC6Ymb9aAdLAWgIR0Cfouaya/h3dX2UKGgGR7/PfO2RaHKwaAdLA2gIR0Cfobw/gR9PdX2UKGgGR7/MNYKYzBRAaAdLA2gIR0CfoopkPMB7dX2UKGgGR7+4BYFJQLuyaAdLAmgIR0CfoiTvAoG6dX2UKGgGR7+1OxjawljWaAdLAmgIR0CfovP+4smOdX2UKGgGR7/EHMUypJf6aAdLAmgIR0Cfocm/nGKidX2UKGgGR7+gN7SiM5wPaAdLAWgIR0CfodCYkVvddX2UKGgGR7/MTakAPuohaAdLA2gIR0CfoqJqZc9odX2UKGgGR7/LnmJWNm16aAdLA2gIR0CfowuVHFxXdX2UKGgGR7/WirksBhhIaAdLBGgIR0CfokMVk+X7dX2UKGgGR7/ENJe3QUpNaAdLAmgIR0CfoeFspG4JdX2UKGgGR7/BqhUR3/xUaAdLAmgIR0Cfoq+iJwbVdX2UKGgGR7/DMV1wHZ9NaAdLAmgIR0Cfok/+85CGdX2UKGgGR7/AAp8WsRxtaAdLAmgIR0Cfoe5PuXu3dX2UKGgGR7/FgWrOqvNeaAdLA2gIR0Cfox9l2/zrdX2UKGgGR7/CZbY9Pk7waAdLAmgIR0Cforyd4FA3dX2UKGgGR7+7cYZVGTcJaAdLAmgIR0CfosySFGoadX2UKGgGR7/RC1JDmbLEaAdLA2gIR0Cfomdsi0OWdX2UKGgGR7/WzVtoBaLXaAdLA2gIR0CfogW8yvcKdX2UKGgGR7/H/QSi/O+qaAdLA2gIR0CfozbZezD5dX2UKGgGR7+/sv7FbVz7aAdLAmgIR0CfonRNRFZxdX2UKGgGR7+3PTodMj/uaAdLAmgIR0CfohKXfIjodX2UKGgGR7/B0OmR/3FlaAdLAmgIR0Cfo0OcDr7gdX2UKGgGR7+jhWHUMG5daAdLAWgIR0Cfo02AG0NSdX2UKGgGR7/ZaOgg5imVaAdLBGgIR0CfourWAf+1dX2UKGgGR7+/IXCTEBKdaAdLAmgIR0CfooVZcLSedX2UKGgGR7/RAo5PuXu3aAdLA2gIR0Cfoimqo60ZdX2UKGgGR7/Fp3X7Lt/naAdLA2gIR0Cfo2BlcyFgdX2UKGgGR7/JZamoBJZoaAdLA2gIR0Cfov3C9AX3dX2UKGgGR7/U9WIXTEzgaAdLBGgIR0Cfop6hxo7FdX2UKGgGR7++tozvZyuIaAdLAmgIR0Cfo3GiYb84dX2UKGgGR7/EN2C/XXiBaAdLAmgIR0Cfow8GcFyJdX2UKGgGR7
/a1L8Jlar4aAdLBGgIR0CfokfiPyTZdX2UKGgGR7/Q0JWvKU3XaAdLA2gIR0CforYukDZEdX2UKGgGR7/FVhkRSP2gaAdLA2gIR0Cfo4VbA1vVdX2UKGgGR7/YSL61stTUaAdLBGgIR0CfoykHlfZ3dX2UKGgGR7/DADaGpMpPaAdLAmgIR0CfosOpbUw0dX2UKGgGR7/YZGax5cC6aAdLBGgIR0CfomIn0CiidX2UKGgGR7/Qyup0fYBeaAdLA2gIR0Cfo50TDfm+dX2UKGgGR7/AzTnaFmFraAdLAmgIR0CfotStvGZNdX2UKGgGR7/KTmnwXqJNaAdLA2gIR0Cfo0D3ueBhdX2UKGgGR7/CWv8qFyq/aAdLAmgIR0Cfo6qXnhbXdX2UKGgGR7/ZLS/j81n/aAdLBGgIR0CfooDgIhQndX2UKGgGR7++2PT5O8CgaAdLAmgIR0Cfo1D9fkWAdX2UKGgGR7/F5zHS4OMEaAdLA2gIR0CfouwnH/96dWUu"}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 50000, "n_steps": 5, "gamma": 0.99, "gae_lambda": 1.0, "ent_coef": 0.0, "vf_coef": 0.5, "max_grad_norm": 0.5, "normalize_advantage": false, "observation_space": {":type:": "<class 'gymnasium.spaces.dict.Dict'>", ":serialized:": "gAWVsAMAAAAAAACMFWd5bW5hc2l1bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwUZ3ltbmFzaXVtLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowNYm91bmRlZF9iZWxvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYDAAAAAAAAAAEBAZRoE4wCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoHCiWAwAAAAAAAAABAQGUaCBLA4WUaCR0lFKUjAZfc2hhcGWUSwOFlIwDbG93lGgcKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFksDhZRoJHSUUpSMBGhpZ2iUaBwolgwAAAAAAAAAAAAgQQAAIEEAACBBlGgWSwOFlGgkdJRSlIwIbG93X3JlcHKUjAUtMTAuMJSMCWhpZ2hfcmVwcpSMBDEwLjCUjApfbnBfcmFuZG9tlE51YowMZGVzaXJlZF9nb2FslGgNKYGUfZQoaBBoFmgZaBwolgMAAAAAAAAAAQEBlGggSwOFlGgkdJRSlGgnaBwolgMAAAAAAAAAAQEBlGggSwOFlGgkdJRSlGgsSwOFlGguaBwolgwAAAAAAAAAAAAgwQAAIMEAACDBlGgWSwOFlGgkdJRSlGgzaBwolgwAAAAAAAAAAAAgQQAAIEEAACBBlGgWSwOFlGgkdJRSlGg4jAUtMTAuMJRoOowEMTAuMJRoPE51YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgWaBloHCiWBgAAAAAAAAABAQEBAQGUaCBLBoWUaCR0lFKUaCdoHCiWBgAAAAAAAAABAQEBAQGUaCBLBoWUaCR0lFKUaCxLBoWUaC5oHCiWGAAAAAAAAAAAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBZLBoWUaCR0lFKUaDNoHCiWGAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEGUaBZLBoWUaCR0lFKUaDiMBS0xMC4wlGg6jAQxMC4wlGg8TnVidWgsTmgQTmg8TnViLg==", "spaces": "OrderedDict([('achieved_goal', Box(-10.0, 10.0, (3,), float32)), ('desired_goal', Box(-10.0, 10.0, (3,), float32)), ('observation', Box(-10.0, 10.0, (6,), float32))])", "_shape": null, "dtype": null, "_np_random": null}, "action_space": {":type:": "<class 'gymnasium.spaces.box.Box'>", ":serialized:": "gAWVnQEAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWAwAAAAAAAAABAQGUaAiMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLA4WUjAFDlHSUUpSMDWJvdW5kZWRfYWJvdmWUaBEolgMAAAAAAAAAAQEBlGgVSwOFlGgZdJRSlIwGX3NoYXBllEsDhZSMA2xvd5RoESiWDAAAAAAAAAAAAIC/AACAvwAAgL+UaAtLA4WUaBl0lFKUjARoaWdolGgRKJYMAAAAAAAAAAAAgD8AAIA/AACAP5RoC0sDhZRoGXSUUpSMCGxvd19yZXBylIwELTEuMJSMCWhpZ2hfcmVwcpSMAzEuMJSMCl9ucF9yYW5kb22UTnViLg==", "dtype": "float32", "bounded_below": "[ True True True]", "bounded_above": "[ True True True]", "_shape": [3], "low": "[-1. -1. -1.]", "high": "[1. 1. 
1.]", "low_repr": "-1.0", "high_repr": "1.0", "_np_random": null}, "n_envs": 4, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVAQMAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMZy9ob21lL21sMi9hbmFjb25kYTMvZW52cy9odWdnaW5nZmFjZS1hMmMvbGliL3B5dGhvbjMuMTAvc2l0ZS1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuDQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjGcvaG9tZS9tbDIvYW5hY29uZGEzL2VudnMvaHVnZ2luZ2ZhY2UtYTJjL2xpYi9weXRob24zLjEwL3NpdGUtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz9G8AaNuLrHhZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "system_info": {"OS": "Linux-5.15.0-79-generic-x86_64-with-glibc2.31 # 86~20.04.2-Ubuntu SMP Mon Jul 17 23:27:17 UTC 2023", "Python": "3.10.12", "Stable-Baselines3": "2.1.0", "PyTorch": "2.0.1+cu117", "GPU Enabled": "True", "Numpy": "1.25.2", "Cloudpickle": "2.2.1", "Gymnasium": "0.29.0"}}

New version (line 1):
{"policy_class": {":type:": "<class 'abc.ABCMeta'>", ":serialized:": "gAWVRQAAAAAAAACMIXN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbi5wb2xpY2llc5SMG011bHRpSW5wdXRBY3RvckNyaXRpY1BvbGljeZSTlC4=", "__module__": "stable_baselines3.common.policies", "__doc__": "\n MultiInputActorClass policy class for actor-critic algorithms (has both policy and value prediction).\n Used by A2C, PPO and the likes.\n\n :param observation_space: Observation space (Tuple)\n :param action_space: Action space\n :param lr_schedule: Learning rate schedule (could be constant)\n :param net_arch: The specification of the policy and value networks.\n :param activation_fn: Activation function\n :param ortho_init: Whether to use or not orthogonal initialization\n :param use_sde: Whether to use State Dependent Exploration or not\n :param log_std_init: Initial value for the log standard deviation\n :param full_std: Whether to use (n_features x n_actions) parameters\n for the std instead of only (n_features,) when using gSDE\n :param use_expln: Use ``expln()`` function instead of ``exp()`` to ensure\n a positive standard deviation (cf paper). It allows to keep variance\n above zero and prevent it from growing too fast. In practice, ``exp()`` is usually enough.\n :param squash_output: Whether to squash the output using a tanh function,\n this allows to ensure boundaries when using gSDE.\n :param features_extractor_class: Uses the CombinedExtractor\n :param features_extractor_kwargs: Keyword arguments\n to pass to the features extractor.\n :param share_features_extractor: If True, the features extractor is shared between the policy and value networks.\n :param normalize_images: Whether to normalize images or not,\n dividing by 255.0 (True by default)\n :param optimizer_class: The optimizer to use,\n ``th.optim.Adam`` by default\n :param optimizer_kwargs: Additional keyword arguments,\n excluding the learning rate, to pass to the optimizer\n ", "__init__": "<function MultiInputActorCriticPolicy.__init__ at 0x7f0e86b41bd0>", "__abstractmethods__": "frozenset()", "_abc_impl": "<_abc._abc_data object at 0x7f0e86b484c0>"}, "verbose": 1, "policy_kwargs": {":type:": "<class 'dict'>", ":serialized:": "gAWVgQAAAAAAAAB9lCiMD29wdGltaXplcl9jbGFzc5SME3RvcmNoLm9wdGltLnJtc3Byb3CUjAdSTVNwcm9wlJOUjBBvcHRpbWl6ZXJfa3dhcmdzlH2UKIwFYWxwaGGURz/vrhR64UeujANlcHOURz7k+LWI42jxjAx3ZWlnaHRfZGVjYXmUSwB1dS4=", "optimizer_class": "<class 'torch.optim.rmsprop.RMSprop'>", "optimizer_kwargs": {"alpha": 0.99, "eps": 1e-05, "weight_decay": 0}}, "num_timesteps": 1000000, "_total_timesteps": 1000000, "_num_timesteps_at_start": 0, "seed": null, "action_noise": null, "start_time": 1692698341529201901, "learning_rate": 0.0007, "tensorboard_log": null, "_last_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA5lCOPkdiM7zEyOc+5lCOPkdiM7zEyOc+3/RSPxKC8D4nPcw+Rjw9P7w0jz/6LKO/lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAwQjNPxLFG76Pw2a/+q5fP7FONL60FSS/lyGoP0EWkD8JgdI+A+RuP7xGEz+rw4+/lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADmUI4+R2IzvMTI5z5SmPw+A3d0u5Xcyj7mUI4+R2IzvMTI5z5SmPw+A3d0u5Xcyj7f9FI/EoLwPic9zD7wUpg/5OfOPxGxOz5GPD0/vDSPP/oso7/zVCg/lJdwPi3axL+UaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[ 0.27796096 -0.01094872 0.4527036 ]\n [ 0.27796096 -0.01094872 0.4527036 ]\n [ 0.82404894 0.46974236 0.39890406]\n [ 0.73920095 1.1187968 
-1.2748101 ]]", "desired_goal": "[[ 1.6018296 -0.15211895 -0.9014215 ]\n [ 0.8737637 -0.17608143 -0.64095616]\n [ 1.3135251 1.1256791 0.4111407 ]\n [ 0.9331667 0.5752981 -1.1231588 ]]", "observation": "[[ 0.27796096 -0.01094872 0.4527036 0.4933496 -0.00373024 0.39621416]\n [ 0.27796096 -0.01094872 0.4527036 0.4933496 -0.00373024 0.39621416]\n [ 0.82404894 0.46974236 0.39890406 1.190031 1.6164517 0.18329264]\n [ 0.73920095 1.1187968 -1.2748101 0.6575462 0.23495322 -1.5379082 ]]"}, "_last_episode_starts": {":type:": "<class 'numpy.ndarray'>", ":serialized:": "gAWVdwAAAAAAAACMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYEAAAAAAAAAAEBAACUjAVudW1weZSMBWR0eXBllJOUjAJiMZSJiIeUUpQoSwOMAXyUTk5OSv////9K/////0sAdJRiSwSFlIwBQ5R0lFKULg=="}, "_last_original_obs": {":type:": "<class 'collections.OrderedDict'>", ":serialized:": "gAWVuwEAAAAAAACMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwSbnVtcHkuY29yZS5udW1lcmljlIwLX2Zyb21idWZmZXKUk5QoljAAAAAAAAAA6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+6nIdPRlsGqxDI0o+lIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYksESwOGlIwBQ5R0lFKUjAxkZXNpcmVkX2dvYWyUaAcoljAAAAAAAAAAA1+KPDGH7bzERqU9we2FvQAyPb34iZQ+qY8XvoUl/z1I6I0+Yw0NPnXmEL6Ms6Y9lGgOSwRLA4aUaBJ0lFKUjAtvYnNlcnZhdGlvbpRoByiWYAAAAAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAADqch09GWwarEMjSj4AAAAAAAAAgAAAAACUaA5LBEsGhpRoEnSUUpR1Lg==", "achieved_goal": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01]]", "desired_goal": "[[ 0.01689101 -0.02899513 0.08070138]\n [-0.06539489 -0.04619026 0.29011512]\n [-0.14800896 0.12458328 0.2771628 ]\n [ 0.13774638 -0.14150412 0.08139715]]", "observation": "[[ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]\n [ 3.8439669e-02 -2.1944723e-12 1.9740014e-01 0.0000000e+00\n -0.0000000e+00 0.0000000e+00]]"}, "_episode_num": 0, "use_sde": false, "sde_sample_freq": -1, "_current_progress_remaining": 0.0, "_stats_window_size": 100, "ep_info_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": 
"gAWV4AsAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKUKH2UKIwBcpRHv85ylvZRKpWMAWyUSwOMAXSUR0CbmrvOQhfTdX2UKGgGR7/CFYdQwblzaAdLAmgIR0CbmlCb+cYqdX2UKGgGR7/G2Dxsl9jPaAdLA2gIR0CbmeT4cm0FdX2UKGgGR7/HaxHG0eEJaAdLA2gIR0CbmYWUbDMvdX2UKGgGR7+535eqrBCVaAdLAmgIR0Cbmlxb0OEvdX2UKGgGR7/KTKT0QK8daAdLA2gIR0CbmtDbah6CdX2UKGgGR7/SjjaPCEYgaAdLA2gIR0CbmfmQr+YMdX2UKGgGR7/CCNCJGe+VaAdLAmgIR0CbmZS8an76dX2UKGgGR7+3PJJXhfjTaAdLAmgIR0CbmmuDBdledX2UKGgGR7/DEkSmIj4YaAdLAmgIR0Cbmtx7zCk5dX2UKGgGR7+0/KQq7ROUaAdLAmgIR0CbmgUYbbUPdX2UKGgGR7/Ldi2Dxsl+aAdLA2gIR0CbmaXRPXTWdX2UKGgGR7+/6j3225QQaAdLAmgIR0CbmugkTpPidX2UKGgGR7/SDwH7gsK9aAdLA2gIR0Cbmnzr/sE8dX2UKGgGR7+pDb8FY+0PaAdLAWgIR0CbmavTPSlWdX2UKGgGR7/SpnHvMKTjaAdLA2gIR0CbmhnM+u/2dX2UKGgGR7/I717IDHOsaAdLA2gIR0Cbmb+M6zVudX2UKGgGR7/d6Rhc7hegaAdLBGgIR0CbmwHck+otdX2UKGgGR7/cQla8pTddaAdLBGgIR0CbmpbvPToddX2UKGgGR7/Li2Dxsl9jaAdLA2gIR0CbmisZ5zHTdX2UKGgGR7+xv863iJfqaAdLAmgIR0CbmcujASFodX2UKGgGR7/CYAsCkoF3aAdLAmgIR0CbmxDzRQaadX2UKGgGR7+9vfj0cwQEaAdLAmgIR0CbmqYISlFddX2UKGgGR7/JeY2Kl54XaAdLA2gIR0Cbmj9TP0I1dX2UKGgGR7/EZ+hGpda/aAdLAmgIR0CbmdpeNT99dX2UKGgGR7/UJYDDCP6saAdLA2gIR0CbmyIxQBPsdX2UKGgGR7/SWwNb1RLsaAdLA2gIR0Cbmrb3XZoPdX2UKGgGR7/VoZQ53kgfaAdLA2gIR0CbmlEdNnGsdX2UKGgGR7+5F8XvYvnKaAdLAmgIR0CbmzHKfWc0dX2UKGgGR7/XaRISUTtcaAdLBGgIR0CbmfZxJd0JdX2UKGgGR7/Ig7HQyAQQaAdLA2gIR0Cbms08NhE0dX2UKGgGR7/BKDCgsbvPaAdLAmgIR0Cbmz5mRNh3dX2UKGgGR7/KUahpQDV6aAdLA2gIR0CbmmdznzQNdX2UKGgGR7/NUtqYZ2pyaAdLA2gIR0CbmggZCOWCdX2UKGgGR7/EdhiLEUCaaAdLA2gIR0Cbmt7dBSk1dX2UKGgGR7+5vAGjbi6yaAdLAmgIR0CbmnMPjGT+dX2UKGgGR7/Rzt1IRRMwaAdLA2gIR0Cbm1OafBepdX2UKGgGR7/MBMi8nNPhaAdLA2gIR0CbmhzsyBTXdX2UKGgGR7/A6T4cm0E6aAdLAmgIR0Cbm18UmD15dX2UKGgGR7/VyZ8a4tpVaAdLA2gIR0CbmoezD4xldX2UKGgGR7/c5WilBQenaAdLBGgIR0CbmvmIj4YadX2UKGgGR7+pltj0+TvBaAdLAWgIR0Cbmv72criEdX2UKGgGR7/Ns2NvOyE+aAdLA2gIR0Cbm3MA3kxRdX2UKGgGR7/OrZrYXfqHaAdLA2gIR0CbmpvkRzzVdX2UKGgGR7/YbJfYzzmPaAdLBGgIR0CbmjcFhXr/dX2UKGgGR7+ZXMhX8wYcaAdLAWgIR0Cbm3lJpWWAdX2UKGgGR7+7DiwSrYGuaAdLAmgIR0Cbmw3225QQdX2UKGgGR7++ylenhsInaAdLAmgIR0CbmkJ9RaX8dX2UKGgGR7/PdC3PRiPRaAdLA2gIR0Cbmq2exwAEdX2UKGgGR7/FeXzDn/1haAdLA2gIR0Cbm4qmTC+DdX2UKGgGR7/LXDFZPl+3aAdLA2gIR0Cbmx9+PRzBdX2UKGgGR7+91HOKO1fFaAdLAmgIR0Cbmk57w8W9dX2UKGgGR7+mH31zySV4aAdLAWgIR0CbmyiItUXIdX2UKGgGR7+ySU1Q66reaAdLAmgIR0Cbm5mhdt2tdX2UKGgGR7/IHGjsUqQSaAdLA2gIR0CbmsJ+lTFVdX2UKGgGR7/Ro2GZeAuqaAdLA2gIR0Cbmzn5BTn8dX2UKGgGR7/Gy1NQCSzPaAdLAmgIR0Cbms5eJHiFdX2UKGgGR7/ZD9wWFev7aAdLBGgIR0CbmmnE2pAEdX2UKGgGR7/J0cOskpqiaAdLA2gIR0Cbm6wM6RyPdX2UKGgGR7/D1bqyGBWgaAdLAmgIR0Cbm0kTpPhydX2UKGgGR7+2Ll3hXKbKaAdLAmgIR0Cbm7oRqXWwdX2UKGgGR7/Lw6ySmqHXaAdLA2gIR0CbmuLEk0JodX2UKGgGR7/YA9mpVCHAaAdLBGgIR0CbmoOyVv/BdX2UKGgGR7/UkNFz+3pfaAdLA2gIR0Cbm8tPHktFdX2UKGgGR7/XZ1FH8TBZaAdLBGgIR0Cbm2BU70WedX2UKGgGR7/U3iJfpljFaAdLBGgIR0CbmvzOHFgldX2UKGgGR7/KQJ5VwPy1aAdLA2gIR0Cbmpfq5byIdX2UKGgGR7/DmSyMUAT7aAdLAmgIR0Cbm26Ww/xEdX2UKGgGR7/UkPczqKP5aAdLA2gIR0Cbm9/HYHxCdX2UKGgGR7/Otrbg0j1PaAdLA2gIR0Cbmw3DNyHVdX2UKGgGR7+0YxcmjTKDaAdLAmgIR0Cbm+rVOKwZdX2UKGgGR7/UqZc9nscAaAdLBGgIR0Cbm4hXbM5fdX2UKGgGR7+/dXT3IuGsaAdLAmgIR0Cbm/nL7oB8dX2UKGgGR7/Rf5ULlV94aAdLA2gIR0CbmyJ4B3iadX2UKGgGR7/Q1twaR6njaAdLA2gIR0Cbm5kkrwvydX2UKGgGR7/jTAWSEDhcaAdLCGgIR0Cbmsg2ZRbbdX2UKGgGR7/HWBBiTdLyaAdLA2gIR0CbmzO4oZyddX2UKGgGR7/Y5O8CgbqAaAdLBGgIR0CbnBRs/IKddX2UKGgGR7/Ce5Fw1ivxaAdLAmgIR0Cbm6lSCOFQdX2UKGgGR7/LTjNpudf+aAdLA2gIR0Cbmt3HJcPfdX2UKGgGR7+wvalDWsijaAdLAmgIR0Cbm7SWqtHQdX2UKGgGR7/N6hQFcIJJaAdLA2gIR0Cbm0i0OVgQdX2UKGgGR7/FHvMKTjebaAdLA2gIR0CbnCZJTVDsdX2UKGgGR7+074i5d4VzaAdLAmgIR0CbnDQnx8UmdX2UKGgGR7/UG8VYZEUkaAdLA2gIR0Cbm8lGgBcSdX2UKGgGR7/IWiUPhAGCaAdLA2gIR0Cbm115jYqYdX2UKGgGR7/W752yLQ5WaAdLBGgIR0Cbmvi0v4/NdX2UKGgGR7
+yLXL/0dzXaAdLAmgIR0CbnEAwPAfudX2UKGgGR7/Lnh86V+qjaAdLA2gIR0Cbm9ogV45cdX2UKGgGR7/SrjHXEqDsaAdLA2gIR0Cbm2446wMZdX2UKGgGR7+URaouPFNtaAdLAWgIR0Cbm+AJ9iMHdX2UKGgGR7+X0btJFspHaAdLAWgIR0Cbm3QVbiZOdX2UKGgGR7/RrgwXZXdTaAdLBGgIR0Cbmw+AEt/XdX2UKGgGR7/NrHEMspXqaAdLA2gIR0CbnFTz/ZM+dX2UKGgGR7+lYjjaPCEYaAdLAWgIR0Cbm+nO0LMLdX2UKGgGR7+og1WKdhAoaAdLAWgIR0CbmxjIJZ4fdX2UKGgGR7++PRzBAOawaAdLAmgIR0CbmyS3LFGYdX2UKGgGR7/hvM0P6KtQaAdLBGgIR0Cbm4/VRUFTdX2UKGgGR7/Zzu4PPLPlaAdLBGgIR0CbnG1ndweedX2UKGgGR7/aOFxn3+MqaAdLBGgIR0CbnAJmdy1edWUu"}, "ep_success_buffer": {":type:": "<class 'collections.deque'>", ":serialized:": "gAWVIAAAAAAAAACMC2NvbGxlY3Rpb25zlIwFZGVxdWWUk5QpS2SGlFKULg=="}, "_n_updates": 50000, "n_steps": 5, "gamma": 0.99, "gae_lambda": 1.0, "ent_coef": 0.0, "vf_coef": 0.5, "max_grad_norm": 0.5, "normalize_advantage": false, "observation_space": {":type:": "<class 'gymnasium.spaces.dict.Dict'>", ":serialized:": "gAWVsAMAAAAAAACMFWd5bW5hc2l1bS5zcGFjZXMuZGljdJSMBERpY3SUk5QpgZR9lCiMBnNwYWNlc5SMC2NvbGxlY3Rpb25zlIwLT3JkZXJlZERpY3SUk5QpUpQojA1hY2hpZXZlZF9nb2FslIwUZ3ltbmFzaXVtLnNwYWNlcy5ib3iUjANCb3iUk5QpgZR9lCiMBWR0eXBllIwFbnVtcHmUjAVkdHlwZZSTlIwCZjSUiYiHlFKUKEsDjAE8lE5OTkr/////Sv////9LAHSUYowNYm91bmRlZF9iZWxvd5SMEm51bXB5LmNvcmUubnVtZXJpY5SMC19mcm9tYnVmZmVylJOUKJYDAAAAAAAAAAEBAZRoE4wCYjGUiYiHlFKUKEsDjAF8lE5OTkr/////Sv////9LAHSUYksDhZSMAUOUdJRSlIwNYm91bmRlZF9hYm92ZZRoHCiWAwAAAAAAAAABAQGUaCBLA4WUaCR0lFKUjAZfc2hhcGWUSwOFlIwDbG93lGgcKJYMAAAAAAAAAAAAIMEAACDBAAAgwZRoFksDhZRoJHSUUpSMBGhpZ2iUaBwolgwAAAAAAAAAAAAgQQAAIEEAACBBlGgWSwOFlGgkdJRSlIwIbG93X3JlcHKUjAUtMTAuMJSMCWhpZ2hfcmVwcpSMBDEwLjCUjApfbnBfcmFuZG9tlE51YowMZGVzaXJlZF9nb2FslGgNKYGUfZQoaBBoFmgZaBwolgMAAAAAAAAAAQEBlGggSwOFlGgkdJRSlGgnaBwolgMAAAAAAAAAAQEBlGggSwOFlGgkdJRSlGgsSwOFlGguaBwolgwAAAAAAAAAAAAgwQAAIMEAACDBlGgWSwOFlGgkdJRSlGgzaBwolgwAAAAAAAAAAAAgQQAAIEEAACBBlGgWSwOFlGgkdJRSlGg4jAUtMTAuMJRoOowEMTAuMJRoPE51YowLb2JzZXJ2YXRpb26UaA0pgZR9lChoEGgWaBloHCiWBgAAAAAAAAABAQEBAQGUaCBLBoWUaCR0lFKUaCdoHCiWBgAAAAAAAAABAQEBAQGUaCBLBoWUaCR0lFKUaCxLBoWUaC5oHCiWGAAAAAAAAAAAACDBAAAgwQAAIMEAACDBAAAgwQAAIMGUaBZLBoWUaCR0lFKUaDNoHCiWGAAAAAAAAAAAACBBAAAgQQAAIEEAACBBAAAgQQAAIEGUaBZLBoWUaCR0lFKUaDiMBS0xMC4wlGg6jAQxMC4wlGg8TnVidWgsTmgQTmg8TnViLg==", "spaces": "OrderedDict([('achieved_goal', Box(-10.0, 10.0, (3,), float32)), ('desired_goal', Box(-10.0, 10.0, (3,), float32)), ('observation', Box(-10.0, 10.0, (6,), float32))])", "_shape": null, "dtype": null, "_np_random": null}, "action_space": {":type:": "<class 'gymnasium.spaces.box.Box'>", ":serialized:": "gAWVnQEAAAAAAACMFGd5bW5hc2l1bS5zcGFjZXMuYm94lIwDQm94lJOUKYGUfZQojAVkdHlwZZSMBW51bXB5lIwFZHR5cGWUk5SMAmY0lImIh5RSlChLA4wBPJROTk5K/////0r/////SwB0lGKMDWJvdW5kZWRfYmVsb3eUjBJudW1weS5jb3JlLm51bWVyaWOUjAtfZnJvbWJ1ZmZlcpSTlCiWAwAAAAAAAAABAQGUaAiMAmIxlImIh5RSlChLA4wBfJROTk5K/////0r/////SwB0lGJLA4WUjAFDlHSUUpSMDWJvdW5kZWRfYWJvdmWUaBEolgMAAAAAAAAAAQEBlGgVSwOFlGgZdJRSlIwGX3NoYXBllEsDhZSMA2xvd5RoESiWDAAAAAAAAAAAAIC/AACAvwAAgL+UaAtLA4WUaBl0lFKUjARoaWdolGgRKJYMAAAAAAAAAAAAgD8AAIA/AACAP5RoC0sDhZRoGXSUUpSMCGxvd19yZXBylIwELTEuMJSMCWhpZ2hfcmVwcpSMAzEuMJSMCl9ucF9yYW5kb22UTnViLg==", "dtype": "float32", "bounded_below": "[ True True True]", "bounded_above": "[ True True True]", "_shape": [3], "low": "[-1. -1. -1.]", "high": "[1. 1. 
1.]", "low_repr": "-1.0", "high_repr": "1.0", "_np_random": null}, "n_envs": 4, "lr_schedule": {":type:": "<class 'function'>", ":serialized:": "gAWVAQMAAAAAAACMF2Nsb3VkcGlja2xlLmNsb3VkcGlja2xllIwOX21ha2VfZnVuY3Rpb26Uk5QoaACMDV9idWlsdGluX3R5cGWUk5SMCENvZGVUeXBllIWUUpQoSwFLAEsASwFLAUsTQwSIAFMAlE6FlCmMAV+UhZSMZy9ob21lL21sMi9hbmFjb25kYTMvZW52cy9odWdnaW5nZmFjZS1hMmMvbGliL3B5dGhvbjMuMTAvc2l0ZS1wYWNrYWdlcy9zdGFibGVfYmFzZWxpbmVzMy9jb21tb24vdXRpbHMucHmUjARmdW5jlEuDQwIEAZSMA3ZhbJSFlCl0lFKUfZQojAtfX3BhY2thZ2VfX5SMGHN0YWJsZV9iYXNlbGluZXMzLmNvbW1vbpSMCF9fbmFtZV9flIwec3RhYmxlX2Jhc2VsaW5lczMuY29tbW9uLnV0aWxzlIwIX19maWxlX1+UjGcvaG9tZS9tbDIvYW5hY29uZGEzL2VudnMvaHVnZ2luZ2ZhY2UtYTJjL2xpYi9weXRob24zLjEwL3NpdGUtcGFja2FnZXMvc3RhYmxlX2Jhc2VsaW5lczMvY29tbW9uL3V0aWxzLnB5lHVOTmgAjBBfbWFrZV9lbXB0eV9jZWxslJOUKVKUhZR0lFKUjBxjbG91ZHBpY2tsZS5jbG91ZHBpY2tsZV9mYXN0lIwSX2Z1bmN0aW9uX3NldHN0YXRllJOUaB99lH2UKGgWaA2MDF9fcXVhbG5hbWVfX5SMGWNvbnN0YW50X2ZuLjxsb2NhbHM+LmZ1bmOUjA9fX2Fubm90YXRpb25zX1+UfZSMDl9fa3dkZWZhdWx0c19flE6MDF9fZGVmYXVsdHNfX5ROjApfX21vZHVsZV9flGgXjAdfX2RvY19flE6MC19fY2xvc3VyZV9flGgAjApfbWFrZV9jZWxslJOURz9G8AaNuLrHhZRSlIWUjBdfY2xvdWRwaWNrbGVfc3VibW9kdWxlc5RdlIwLX19nbG9iYWxzX1+UfZR1hpSGUjAu"}, "system_info": {"OS": "Linux-5.15.0-79-generic-x86_64-with-glibc2.31 # 86~20.04.2-Ubuntu SMP Mon Jul 17 23:27:17 UTC 2023", "Python": "3.10.12", "Stable-Baselines3": "2.1.0", "PyTorch": "2.0.1+cu117", "GPU Enabled": "True", "Numpy": "1.25.2", "Cloudpickle": "2.2.1", "Gymnasium": "0.29.0", "OpenAI Gym": "0.26.2"}}
replay.mp4 ADDED
Binary file (685 kB).
results.json CHANGED
@@ -1 +1 @@
-{"mean_reward": -0.
+{"mean_reward": -0.1784882658161223, "std_reward": 0.11570971806934204, "is_deterministic": true, "n_eval_episodes": 10, "eval_datetime": "2023-08-22T18:45:46.300873"}
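results.json records a deterministic evaluation over 10 episodes with mean reward about -0.18 and standard deviation about 0.12, which matches the value advertised in the README metadata. The evaluation script itself is not part of this diff; a hedged sketch of how such numbers are typically produced with Stable-Baselines3:

```python
import panda_gym  # noqa: F401  (assumption: registers PandaReachDense-v3)

from stable_baselines3 import A2C
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.evaluation import evaluate_policy
from stable_baselines3.common.vec_env import VecNormalize

eval_env = make_vec_env("PandaReachDense-v3", n_envs=1)
# Reuse the training normalization statistics; disable reward normalization for evaluation.
eval_env = VecNormalize.load("vec_normalize.pkl", eval_env)
eval_env.training = False
eval_env.norm_reward = False

model = A2C.load("a2c-PandaReachDense-v3.zip", env=eval_env)
mean_reward, std_reward = evaluate_policy(model, eval_env, n_eval_episodes=10, deterministic=True)
print(f"mean_reward={mean_reward:.2f} +/- {std_reward:.2f}")
```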
vec_normalize.pkl CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:d385929ee9580a10b20d98cd23a52a5620f5ea1e2b3bb6bd4f1243a13cbe65d0
+size 2623
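vec_normalize.pkl holds the VecNormalize running statistics that observations were normalized with during training, so it has to be loaded together with the policy at inference time. A hedged usage sketch (file names as they appear in this repository; the rollout loop itself is illustrative):

```python
import panda_gym  # noqa: F401  (assumption: registers PandaReachDense-v3)

from stable_baselines3 import A2C
from stable_baselines3.common.env_util import make_vec_env
from stable_baselines3.common.vec_env import VecNormalize

env = make_vec_env("PandaReachDense-v3", n_envs=1)
env = VecNormalize.load("vec_normalize.pkl", env)
env.training = False      # freeze the running statistics
env.norm_reward = False   # report raw rewards at inference time

model = A2C.load("a2c-PandaReachDense-v3.zip", env=env)

obs = env.reset()
for _ in range(1000):
    action, _states = model.predict(obs, deterministic=True)
    obs, rewards, dones, infos = env.step(action)
```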