{
"metadata": {
"total_size": 5262311424
},
"weight_map": {
"transformer.h.0.attn.attention.k_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.0.attn.attention.out_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.0.attn.attention.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.0.attn.attention.q_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.0.attn.attention.v_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.0.ln_1.bias": "model-00001-of-00002.safetensors",
"transformer.h.0.ln_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.0.ln_2.bias": "model-00001-of-00002.safetensors",
"transformer.h.0.ln_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.0.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
"transformer.h.0.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
"transformer.h.0.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.0.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.1.attn.attention.k_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.1.attn.attention.out_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.1.attn.attention.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.1.attn.attention.q_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.1.attn.attention.v_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.1.ln_1.bias": "model-00001-of-00002.safetensors",
"transformer.h.1.ln_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.1.ln_2.bias": "model-00001-of-00002.safetensors",
"transformer.h.1.ln_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.1.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
"transformer.h.1.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
"transformer.h.1.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.1.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.10.attn.attention.k_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.10.attn.attention.out_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.10.attn.attention.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.10.attn.attention.q_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.10.attn.attention.v_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.10.ln_1.bias": "model-00001-of-00002.safetensors",
"transformer.h.10.ln_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.10.ln_2.bias": "model-00001-of-00002.safetensors",
"transformer.h.10.ln_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.10.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
"transformer.h.10.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
"transformer.h.10.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.10.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.11.attn.attention.k_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.11.attn.attention.out_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.11.attn.attention.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.11.attn.attention.q_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.11.attn.attention.v_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.11.ln_1.bias": "model-00001-of-00002.safetensors",
"transformer.h.11.ln_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.11.ln_2.bias": "model-00001-of-00002.safetensors",
"transformer.h.11.ln_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.11.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
"transformer.h.11.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
"transformer.h.11.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.11.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.12.attn.attention.k_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.12.attn.attention.out_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.12.attn.attention.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.12.attn.attention.q_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.12.attn.attention.v_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.12.ln_1.bias": "model-00001-of-00002.safetensors",
"transformer.h.12.ln_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.12.ln_2.bias": "model-00001-of-00002.safetensors",
"transformer.h.12.ln_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.12.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
"transformer.h.12.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
"transformer.h.12.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.12.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.13.attn.attention.k_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.13.attn.attention.out_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.13.attn.attention.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.13.attn.attention.q_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.13.attn.attention.v_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.13.ln_1.bias": "model-00001-of-00002.safetensors",
"transformer.h.13.ln_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.13.ln_2.bias": "model-00001-of-00002.safetensors",
"transformer.h.13.ln_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.13.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
"transformer.h.13.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
"transformer.h.13.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.13.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.14.attn.attention.k_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.14.attn.attention.out_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.14.attn.attention.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.14.attn.attention.q_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.14.attn.attention.v_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.14.ln_1.bias": "model-00001-of-00002.safetensors",
"transformer.h.14.ln_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.14.ln_2.bias": "model-00001-of-00002.safetensors",
"transformer.h.14.ln_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.14.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
"transformer.h.14.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
"transformer.h.14.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.14.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.15.attn.attention.k_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.15.attn.attention.out_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.15.attn.attention.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.15.attn.attention.q_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.15.attn.attention.v_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.15.ln_1.bias": "model-00001-of-00002.safetensors",
"transformer.h.15.ln_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.15.ln_2.bias": "model-00001-of-00002.safetensors",
"transformer.h.15.ln_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.15.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
"transformer.h.15.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
"transformer.h.15.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.15.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.16.attn.attention.k_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.16.attn.attention.out_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.16.attn.attention.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.16.attn.attention.q_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.16.attn.attention.v_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.16.ln_1.bias": "model-00001-of-00002.safetensors",
"transformer.h.16.ln_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.16.ln_2.bias": "model-00001-of-00002.safetensors",
"transformer.h.16.ln_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.16.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
"transformer.h.16.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
"transformer.h.16.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.16.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.17.attn.attention.k_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.17.attn.attention.out_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.17.attn.attention.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.17.attn.attention.q_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.17.attn.attention.v_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.17.ln_1.bias": "model-00001-of-00002.safetensors",
"transformer.h.17.ln_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.17.ln_2.bias": "model-00001-of-00002.safetensors",
"transformer.h.17.ln_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.17.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
"transformer.h.17.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
"transformer.h.17.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.17.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.18.attn.attention.k_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.18.attn.attention.out_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.18.attn.attention.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.18.attn.attention.q_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.18.attn.attention.v_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.18.ln_1.bias": "model-00001-of-00002.safetensors",
"transformer.h.18.ln_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.18.ln_2.bias": "model-00001-of-00002.safetensors",
"transformer.h.18.ln_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.18.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
"transformer.h.18.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
"transformer.h.18.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.18.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.19.attn.attention.k_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.19.attn.attention.out_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.19.attn.attention.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.19.attn.attention.q_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.19.attn.attention.v_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.19.ln_1.bias": "model-00001-of-00002.safetensors",
"transformer.h.19.ln_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.19.ln_2.bias": "model-00001-of-00002.safetensors",
"transformer.h.19.ln_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.19.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
"transformer.h.19.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
"transformer.h.19.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.19.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.2.attn.attention.k_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.2.attn.attention.out_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.2.attn.attention.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.2.attn.attention.q_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.2.attn.attention.v_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.2.ln_1.bias": "model-00001-of-00002.safetensors",
"transformer.h.2.ln_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.2.ln_2.bias": "model-00001-of-00002.safetensors",
"transformer.h.2.ln_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.2.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
"transformer.h.2.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
"transformer.h.2.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.2.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.20.attn.attention.k_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.20.attn.attention.out_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.20.attn.attention.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.20.attn.attention.q_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.20.attn.attention.v_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.20.ln_1.bias": "model-00001-of-00002.safetensors",
"transformer.h.20.ln_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.20.ln_2.bias": "model-00001-of-00002.safetensors",
"transformer.h.20.ln_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.20.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
"transformer.h.20.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
"transformer.h.20.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.20.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.21.attn.attention.k_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.21.attn.attention.out_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.21.attn.attention.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.21.attn.attention.q_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.21.attn.attention.v_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.21.ln_1.bias": "model-00001-of-00002.safetensors",
"transformer.h.21.ln_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.21.ln_2.bias": "model-00001-of-00002.safetensors",
"transformer.h.21.ln_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.21.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
"transformer.h.21.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
"transformer.h.21.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.21.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.22.attn.attention.k_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.22.attn.attention.out_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.22.attn.attention.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.22.attn.attention.q_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.22.attn.attention.v_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.22.ln_1.bias": "model-00001-of-00002.safetensors",
"transformer.h.22.ln_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.22.ln_2.bias": "model-00001-of-00002.safetensors",
"transformer.h.22.ln_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.22.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
"transformer.h.22.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
"transformer.h.22.mlp.c_proj.bias": "model-00002-of-00002.safetensors",
"transformer.h.22.mlp.c_proj.weight": "model-00002-of-00002.safetensors",
"transformer.h.23.attn.attention.k_proj.weight": "model-00002-of-00002.safetensors",
"transformer.h.23.attn.attention.out_proj.bias": "model-00002-of-00002.safetensors",
"transformer.h.23.attn.attention.out_proj.weight": "model-00002-of-00002.safetensors",
"transformer.h.23.attn.attention.q_proj.weight": "model-00002-of-00002.safetensors",
"transformer.h.23.attn.attention.v_proj.weight": "model-00002-of-00002.safetensors",
"transformer.h.23.ln_1.bias": "model-00002-of-00002.safetensors",
"transformer.h.23.ln_1.weight": "model-00002-of-00002.safetensors",
"transformer.h.23.ln_2.bias": "model-00002-of-00002.safetensors",
"transformer.h.23.ln_2.weight": "model-00002-of-00002.safetensors",
"transformer.h.23.mlp.c_fc.bias": "model-00002-of-00002.safetensors",
"transformer.h.23.mlp.c_fc.weight": "model-00002-of-00002.safetensors",
"transformer.h.23.mlp.c_proj.bias": "model-00002-of-00002.safetensors",
"transformer.h.23.mlp.c_proj.weight": "model-00002-of-00002.safetensors",
"transformer.h.3.attn.attention.k_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.3.attn.attention.out_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.3.attn.attention.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.3.attn.attention.q_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.3.attn.attention.v_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.3.ln_1.bias": "model-00001-of-00002.safetensors",
"transformer.h.3.ln_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.3.ln_2.bias": "model-00001-of-00002.safetensors",
"transformer.h.3.ln_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.3.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
"transformer.h.3.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
"transformer.h.3.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.3.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.4.attn.attention.k_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.4.attn.attention.out_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.4.attn.attention.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.4.attn.attention.q_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.4.attn.attention.v_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.4.ln_1.bias": "model-00001-of-00002.safetensors",
"transformer.h.4.ln_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.4.ln_2.bias": "model-00001-of-00002.safetensors",
"transformer.h.4.ln_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.4.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
"transformer.h.4.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
"transformer.h.4.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.4.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.5.attn.attention.k_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.5.attn.attention.out_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.5.attn.attention.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.5.attn.attention.q_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.5.attn.attention.v_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.5.ln_1.bias": "model-00001-of-00002.safetensors",
"transformer.h.5.ln_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.5.ln_2.bias": "model-00001-of-00002.safetensors",
"transformer.h.5.ln_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.5.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
"transformer.h.5.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
"transformer.h.5.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.5.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.6.attn.attention.k_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.6.attn.attention.out_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.6.attn.attention.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.6.attn.attention.q_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.6.attn.attention.v_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.6.ln_1.bias": "model-00001-of-00002.safetensors",
"transformer.h.6.ln_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.6.ln_2.bias": "model-00001-of-00002.safetensors",
"transformer.h.6.ln_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.6.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
"transformer.h.6.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
"transformer.h.6.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.6.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.7.attn.attention.k_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.7.attn.attention.out_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.7.attn.attention.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.7.attn.attention.q_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.7.attn.attention.v_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.7.ln_1.bias": "model-00001-of-00002.safetensors",
"transformer.h.7.ln_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.7.ln_2.bias": "model-00001-of-00002.safetensors",
"transformer.h.7.ln_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.7.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
"transformer.h.7.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
"transformer.h.7.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.7.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.8.attn.attention.k_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.8.attn.attention.out_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.8.attn.attention.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.8.attn.attention.q_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.8.attn.attention.v_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.8.ln_1.bias": "model-00001-of-00002.safetensors",
"transformer.h.8.ln_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.8.ln_2.bias": "model-00001-of-00002.safetensors",
"transformer.h.8.ln_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.8.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
"transformer.h.8.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
"transformer.h.8.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.8.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.9.attn.attention.k_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.9.attn.attention.out_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.9.attn.attention.out_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.9.attn.attention.q_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.9.attn.attention.v_proj.weight": "model-00001-of-00002.safetensors",
"transformer.h.9.ln_1.bias": "model-00001-of-00002.safetensors",
"transformer.h.9.ln_1.weight": "model-00001-of-00002.safetensors",
"transformer.h.9.ln_2.bias": "model-00001-of-00002.safetensors",
"transformer.h.9.ln_2.weight": "model-00001-of-00002.safetensors",
"transformer.h.9.mlp.c_fc.bias": "model-00001-of-00002.safetensors",
"transformer.h.9.mlp.c_fc.weight": "model-00001-of-00002.safetensors",
"transformer.h.9.mlp.c_proj.bias": "model-00001-of-00002.safetensors",
"transformer.h.9.mlp.c_proj.weight": "model-00001-of-00002.safetensors",
"transformer.ln_f.bias": "model-00002-of-00002.safetensors",
"transformer.ln_f.weight": "model-00002-of-00002.safetensors",
"transformer.wpe.weight": "model-00001-of-00002.safetensors",
"transformer.wte.weight": "model-00001-of-00002.safetensors"
}
}