VISOR-GPT / train /models /vit /huge-14-224_config.json
{
"emb_size": 1280,
"feedforward_size": 5120,
"hidden_size": 1280,
"hidden_act": "gelu",
"heads_num": 16,
"layers_num": 32,
"dropout": 0.1,
"max_seq_length": 257,
"data_processor": "vit",
"embedding": ["patch", "pos"],
"remove_embedding_layernorm": true,
"encoder": "transformer",
"mask": "fully_visible",
"layernorm_positioning": "pre",
"target": ["cls"],
"image_height": 224,
"image_width": 224,
"patch_size": 14
}
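As a sanity check, the sequence length in this config follows from the image and patch sizes: a 224x224 image split into 14x14 patches gives (224 / 14)^2 = 256 patch tokens, plus one [CLS] token (the "cls" target) for a total of 257, matching "max_seq_length". A minimal Python sketch of that check, assuming the config is read from a hypothetical local path:

import json

# Load the ViT-Huge/14 config shown above (the path below is illustrative).
with open("models/vit/huge-14-224_config.json") as f:
    config = json.load(f)

# Derive the expected sequence length from the image and patch sizes.
patches_per_side = config["image_height"] // config["patch_size"]
num_patches = patches_per_side * (config["image_width"] // config["patch_size"])
expected_seq_length = num_patches + 1  # +1 for the [CLS] token

assert expected_seq_length == config["max_seq_length"], (
    f"expected {expected_seq_length}, got {config['max_seq_length']}"
)
print(f"patch tokens: {num_patches}, sequence length: {expected_seq_length}")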