amr_my_llm_finetuned / config.json
_name_or_path: "TheBloke/Llama-2-7B-fp16",
architectures: [
"LlamaForCausalLM"
],
bos_token_id: 1,
eos_token_id: 2,
hidden_act: "silu",
hidden_size: 4096,
initializer_range: 0.02,
intermediate_size: 11008,
max_position_embeddings: 2048,
model_type: "llama",
num_attention_heads: 32,
num_hidden_layers: 32,
num_key_value_heads: 32,
pad_token_id: 0,
pretraining_tp: 1,
rms_norm_eps: 0.00001,
rope_scaling: null,
tie_word_embeddings: false,
torch_dtype: "float32",
transformers_version: "4.31.0",
use_cache: true,
vocab_size: 32000
}
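
A config like this is normally consumed through the Transformers AutoConfig API. The sketch below shows how the file could be loaded and inspected; the repo id "royam0820/amr_my_llm_finetuned" is assumed from the page path, and a local directory containing this config.json works the same way.

from transformers import AutoConfig, AutoModelForCausalLM

# Assumed repo id, inferred from the page path; a local path to the
# directory holding this config.json can be passed instead.
repo_id = "royam0820/amr_my_llm_finetuned"

# Load only the configuration (no weights downloaded).
config = AutoConfig.from_pretrained(repo_id)

print(config.model_type)               # "llama"
print(config.hidden_size)              # 4096
print(config.max_position_embeddings)  # 2048

# Instantiating the model itself requires the accompanying weight files:
# model = AutoModelForCausalLM.from_pretrained(repo_id)

Note that torch_dtype is "float32" here; loading with torch_dtype=torch.float16 (or "auto") at from_pretrained time is a common way to halve memory use for a 7B model.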