ppo-seals-Swimmer-v0 / config.yml
!!python/object/apply:collections.OrderedDict
- - - batch_size
    - 8
  - - clip_range
    - 0.1
  - - ent_coef
    - 5.167107294612664e-08
  - - gae_lambda
    - 0.95
  - - gamma
    - 0.999
  - - learning_rate
    - 0.0001214437022727675
  - - max_grad_norm
    - 2
  - - n_epochs
    - 20
  - - n_steps
    - 2048
  - - n_timesteps
    - 1000000.0
  - - normalize
    - true
  - - policy
    - MlpPolicy
  - - policy_kwargs
    - dict(activation_fn=nn.Tanh, net_arch=[dict(pi=[64, 64], vf=[64, 64])])
  - - vf_coef
    - 0.6162112311062333
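
For reference, below is a minimal sketch (not part of this repository) of how these hyperparameters map onto a stable-baselines3 PPO call. The environment id "seals/Swimmer-v0", the use of gym rather than gymnasium, and the VecNormalize wrapper standing in for `normalize: true` are assumptions; the list-wrapped net_arch value is copied verbatim from the config and follows an older SB3 convention.

import gym
import seals  # noqa: F401 -- importing registers the seals/* environments (assumed)
import torch.nn as nn
from stable_baselines3 import PPO
from stable_baselines3.common.vec_env import DummyVecEnv, VecNormalize

# `normalize: true` in the config corresponds to observation/reward
# normalization; VecNormalize is the usual SB3 mechanism for that.
venv = VecNormalize(DummyVecEnv([lambda: gym.make("seals/Swimmer-v0")]))

model = PPO(
    "MlpPolicy",  # policy
    venv,
    batch_size=8,
    clip_range=0.1,
    ent_coef=5.167107294612664e-08,
    gae_lambda=0.95,
    gamma=0.999,
    learning_rate=0.0001214437022727675,
    max_grad_norm=2,
    n_epochs=20,
    n_steps=2048,
    # policy_kwargs taken verbatim from the config (older SB3 net_arch format)
    policy_kwargs=dict(
        activation_fn=nn.Tanh,
        net_arch=[dict(pi=[64, 64], vf=[64, 64])],
    ),
    vf_coef=0.6162112311062333,
)
model.learn(total_timesteps=1_000_000)  # n_timesteps: 1000000.0

In normal use, rl-baselines3-zoo consumes a config.yml like this one directly through its training script rather than via manual instantiation; the sketch above only makes the parameter mapping explicit.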