{
  "model_type": "llama",
  "hidden_size": 4096,
  "num_attention_heads": 32,
  "num_hidden_layers": 16,
  "intermediate_size": 11008,
  "hidden_act": "silu",
  "initializer_range": 0.02,
  "layer_norm_eps": 1e-5,
  "vocab_size": 32000,
  "max_position_embeddings": 2048,
  "rms_norm_eps": 1e-6,
  "tie_word_embeddings": false,
  "use_cache": true,
  "pad_token_id": 0,
  "bos_token_id": 1,
  "eos_token_id": 2,
  "flash_norm_params": {
    "scaling_factor": 0.5,
    "use_batch_norm": false
  }
}
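
Below is a minimal sketch of how this config might be consumed, assuming it is saved as `config.json` (the file name and the consistency checks are illustrative, not part of any library API). It loads the JSON with Python's standard `json` module, verifies that `hidden_size` divides evenly across `num_attention_heads`, and reads the custom `flash_norm_params` block like any other nested field.

```python
import json

# Hypothetical path; adjust to wherever the config above is stored.
with open("config.json") as f:
    cfg = json.load(f)

# hidden_size must split evenly across attention heads for the model
# to instantiate; here 4096 / 32 = 128 per head.
assert cfg["hidden_size"] % cfg["num_attention_heads"] == 0
head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]
print(f"per-head dimension: {head_dim}")  # 128

# The custom flash_norm_params block is plain nested JSON.
print(cfg["flash_norm_params"]["scaling_factor"])  # 0.5
print(cfg["flash_norm_params"]["use_batch_norm"])  # False
```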