{
  "_name_or_path": "TheBloke/Llama-2-7B-fp16",
  "architectures": [
    "LlamaForCausalLM"
  ],
  "bos_token_id": 1,
  "eos_token_id": 2,
  "hidden_act": "silu",
  "hidden_size": 4096,
  "initializer_range": 0.02,
  "intermediate_size": 11008,
  "max_position_embeddings": 2048,
  "model_type": "llama",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 32,
  "pad_token_id": 0,
  "pretraining_tp": 1,
  "rms_norm_eps": 0.00001,
  "rope_scaling": null,
  "tie_word_embeddings": false,
  "torch_dtype": "float32",
  "transformers_version": "4.31.0",
  "use_cache": true,
  "vocab_size": 32000
}
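For reference, this configuration can also be loaded programmatically rather than read from the raw file. The minimal Python sketch below assumes the Hugging Face transformers library is installed and that the repository ID from _name_or_path above is reachable on the Hub; the printed values simply echo fields from the config shown here.

from transformers import AutoConfig

# Fetch config.json for the repository shown above and parse it into a LlamaConfig.
config = AutoConfig.from_pretrained("TheBloke/Llama-2-7B-fp16")

# A few fields from the file above, accessed as attributes.
print(config.model_type)               # "llama"
print(config.hidden_size)              # 4096
print(config.num_hidden_layers)        # 32
print(config.max_position_embeddings)  # 2048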