{ "emb_size": 1024, "feedforward_size": 4096, "hidden_size": 1024, "hidden_act": "gelu", "heads_num": 16, "layers_num": 24, "dropout": 0.1, "max_seq_length": 50, "data_processor": "vit", "embedding": ["patch", "pos"], "remove_embedding_layernorm": true, "encoder": "transformer", "mask": "fully_visible", "layernorm_positioning": "pre", "target": ["cls"], "image_height": 224, "image_width": 224, "patch_size": 32 }