general_cfg:
  algo_name: PPO            # algorithm name
  collect_traj: false
  device: cpu               # torch device
  env_name: gym
  load_checkpoint: true     # load a trained model before running
  load_model_step: best     # which saved checkpoint to load
  load_path: Train_single_CartPole-v1_PPO_20230517-134440  # directory of the training run to load
  max_episode: 10           # number of test episodes
  max_step: 200             # max steps per episode
  mode: test                # evaluation-only run
  model_save_fre: 10        # model save frequency
  mp_backend: single        # multiprocessing backend
  n_workers: 2              # number of parallel workers
  online_eval: true
  online_eval_episode: 10
  seed: 1                   # random seed
algo_cfg:
  actor_hidden_dim: 256
  actor_layers:             # actor MLP: two 256-unit linear layers with ReLU
  - activation: relu
    layer_dim:
    - 256
    layer_type: linear
  - activation: relu
    layer_dim:
    - 256
    layer_type: linear
  actor_lr: 0.0003          # actor learning rate
  batch_size: 256
  buffer_type: ONPOLICY_QUE # on-policy rollout buffer
  continuous: false         # discrete action space
  critic_hidden_dim: 256
  critic_layers:            # critic MLP: two 256-unit linear layers with ReLU
  - activation: relu
    layer_dim:
    - 256
    layer_type: linear
  - activation: relu
    layer_dim:
    - 256
    layer_type: linear
  critic_loss_coef: 0.5     # weight of the value (critic) loss
  critic_lr: 0.001          # critic learning rate
  entropy_coef: 0.01        # entropy bonus coefficient
  eps_clip: 0.2             # clipping range of the surrogate objective
  gamma: 0.99               # discount factor
  independ_actor: true      # separate actor and critic networks
  k_epochs: 4               # optimization epochs per rollout
  kl_alpha: 2               # kl_* fields parameterize the KL-penalty variant (ppo_type: kl)
  kl_beta: 1.5
  kl_lambda: 0.5
  kl_target: 0.1
  lr: 0.0001
  min_policy: 0
  ppo_type: clip            # clipped surrogate objective rather than KL penalty
  sgd_batch_size: 128       # minibatch size for each SGD step
  share_optimizer: false    # separate optimizers for actor and critic
env_cfg:
  id: CartPole-v1           # Gym environment id
  ignore_params:
  - wrapper
  - ignore_params
  render_mode: null         # no rendering
  wrapper: null             # no environment wrapper
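
The block below is a minimal sketch, assuming the preset above is saved as CartPole-v1_PPO_Test.yaml and read with plain PyYAML and PyTorch (not the framework's own loader or update code), of how a few of these fields map onto the clipped surrogate objective that ppo_type: clip refers to. The file name and the helper ppo_clip_loss are hypothetical.

import yaml   # PyYAML
import torch

# Load the preset; the file name is assumed for illustration.
with open("CartPole-v1_PPO_Test.yaml", "r") as f:
    cfg = yaml.safe_load(f)

general_cfg, algo_cfg, env_cfg = cfg["general_cfg"], cfg["algo_cfg"], cfg["env_cfg"]
print(general_cfg["algo_name"], env_cfg["id"], general_cfg["mode"])  # PPO CartPole-v1 test

def ppo_clip_loss(logp_new, logp_old, advantages, eps_clip=algo_cfg["eps_clip"]):
    # Clipped surrogate: keep the policy ratio inside [1 - eps_clip, 1 + eps_clip].
    ratio = torch.exp(logp_new - logp_old)
    unclipped = ratio * advantages
    clipped = torch.clamp(ratio, 1.0 - eps_clip, 1.0 + eps_clip) * advantages
    return -torch.min(unclipped, clipped).mean()

In a full update the critic's value loss would additionally be weighted by critic_loss_coef and an entropy bonus by entropy_coef; both terms are omitted here for brevity.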