ijepa_vitg16_22k / config.json
{
"architectures": [
"IJepaModel"
],
"attention_probs_dropout_prob": 0.0,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.0,
"hidden_size": 1408,
"image_size": 224,
"initializer_range": 0.02,
"intermediate_size": 6144,
"layer_norm_eps": 1e-06,
"mlp_ratio": 4.363636363636363,
"model_type": "ijepa",
"num_attention_heads": 16,
"num_channels": 3,
"num_hidden_layers": 40,
"patch_size": 16,
"qkv_bias": true,
"torch_dtype": "float32",
"transformers_version": "4.47.0.dev0"
}
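
For reference, a minimal sketch of loading this checkpoint and extracting an image embedding with the transformers API named in the config. The hub repo ID facebook/ijepa_vitg16_22k is assumed from the file path above; adjust it if the model lives under a different namespace. Because the config was written by transformers 4.47.0.dev0, I-JEPA support requires transformers >= 4.47.

import requests
import torch
from PIL import Image
from transformers import AutoModel, AutoProcessor

# Assumed hub ID, inferred from the file path above.
model_id = "facebook/ijepa_vitg16_22k"

processor = AutoProcessor.from_pretrained(model_id)
model = AutoModel.from_pretrained(model_id)  # resolves to IJepaModel per "architectures" above

# Any RGB image works; a COCO validation image is used here for illustration.
url = "http://images.cocodataset.org/val2017/000000039769.jpg"
image = Image.open(requests.get(url, stream=True).raw)

with torch.no_grad():
    inputs = processor(images=image, return_tensors="pt")  # resizes to image_size=224
    outputs = model(**inputs)

# Mean-pool the patch embeddings into a single image representation.
embedding = outputs.last_hidden_state.mean(dim=1)  # shape: (1, 1408), i.e. hidden_size

With patch_size=16 on a 224x224 input, the encoder sees a 14x14 grid of patches, so last_hidden_state has 196 tokens of width 1408 before pooling.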