{
    "action_chunk_size": 5,
    "bet_softmax_temperature": 0.01,
    "crop_is_random": true,
    "crop_shape": [
        84,
        84
    ],
    "dropout": 0.1,
    "gpt_block_size": 500,
    "gpt_hidden_dim": 512,
    "gpt_input_dim": 512,
    "gpt_n_head": 8,
    "gpt_n_layer": 8,
    "gpt_output_dim": 512,
    "input_normalization_modes": {
        "observation.image": "mean_std",
        "observation.state": "min_max"
    },
    "input_shapes": {
        "observation.image": [
            3,
            96,
            96
        ],
        "observation.state": [
            2
        ]
    },
    "mlp_hidden_dim": 1024,
    "n_action_pred_token": 7,
    "n_obs_steps": 5,
    "n_vqvae_training_steps": 20000,
    "offset_loss_weight": 10000.0,
    "output_normalization_modes": {
        "action": "min_max"
    },
    "output_shapes": {
        "action": [
            2
        ]
    },
    "pretrained_backbone_weights": null,
    "primary_code_loss_weight": 5.0,
    "secondary_code_loss_weight": 0.5,
    "sequentially_select": false,
    "spatial_softmax_num_keypoints": 32,
    "use_group_norm": true,
    "vision_backbone": "resnet18",
    "vqvae_embedding_dim": 256,
    "vqvae_enc_hidden_dim": 128,
    "vqvae_n_embed": 16
}
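
For reference, here is a minimal sketch of how a config like this might be loaded and sanity-checked with the Python standard library, assuming it is saved as `config.json`. The file name, the `VQBeTLikeConfig` dataclass, and the context-budget check below are illustrative assumptions, not the API of any particular library.

```python
import json
from dataclasses import dataclass


@dataclass
class VQBeTLikeConfig:
    # Only the sequence-length related fields are typed here;
    # all remaining keys are kept in the raw dict.
    n_obs_steps: int
    action_chunk_size: int
    n_action_pred_token: int
    gpt_block_size: int
    raw: dict


def load_config(path: str) -> VQBeTLikeConfig:
    # Parse the JSON file and pull out the fields used below.
    with open(path) as f:
        raw = json.load(f)
    return VQBeTLikeConfig(
        n_obs_steps=raw["n_obs_steps"],
        action_chunk_size=raw["action_chunk_size"],
        n_action_pred_token=raw["n_action_pred_token"],
        gpt_block_size=raw["gpt_block_size"],
        raw=raw,
    )


if __name__ == "__main__":
    cfg = load_config("config.json")
    # Rough sanity check (an assumption about how the GPT context is spent):
    # observation tokens plus predicted action tokens should fit in gpt_block_size.
    needed = cfg.n_obs_steps + cfg.n_action_pred_token + cfg.action_chunk_size - 1
    assert needed <= cfg.gpt_block_size, "gpt_block_size too small for the configured horizon"
    print(f"loaded {len(cfg.raw)} keys; uses {needed} of {cfg.gpt_block_size} context tokens")
```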