{
  "freeze-vision": true,
  "language-model": {
    "hidden_size": 512,
    "intermediate_size": 1024,
    "max_position_embeddings": 512,
    "num_attention_heads": 8,
    "num_hidden_layers": 16,
    "num_key_value_heads": 8,
    "vocab_size": 16000
  },
  "phase": 4,
  "tokenizer": "tokenizers/bpe_babylm_100m_v2"
}
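
For reference, a minimal sketch of how a config like this could be consumed. It assumes the file is saved as `config.json`, that the `language-model` block maps one-to-one onto `transformers.LlamaConfig` keyword arguments, and that the vision encoder and tokenizer loading happen elsewhere in the training setup; none of these details are specified by the config itself.

```python
# Minimal sketch: load the config and build a decoder-only language model from it.
# Assumptions (not stated by the config): the file is named config.json, and the
# "language-model" block matches transformers.LlamaConfig keyword arguments.
import json

from transformers import LlamaConfig, LlamaForCausalLM

with open("config.json") as f:
    cfg = json.load(f)

# Build the language model from the "language-model" hyperparameters.
lm_config = LlamaConfig(**cfg["language-model"])
model = LlamaForCausalLM(lm_config)

# "freeze-vision": true would presumably mean the vision encoder (defined
# elsewhere, not in this sketch) keeps requires_grad=False during this phase.
print(cfg["phase"], cfg["tokenizer"], sum(p.numel() for p in model.parameters()))
```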