{
  "metadata": {
    "total_size": 32399855616
  },
  "weight_map": {
    "decoder.lm_head.bias": "pytorch_model-00004-of-00004.bin",
    "decoder.lm_head.weight": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.0.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.0.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.0.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.0.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.0.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.0.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.0.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.0.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.0.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.1.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.1.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.1.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.1.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.1.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.1.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.1.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.1.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.1.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.10.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.10.attn.out_proj.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.10.attn.qkv_proj.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.10.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.10.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.10.mlp.fc_in.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.10.mlp.fc_in.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.10.mlp.fc_out.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.10.mlp.fc_out.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.11.attn.causal_mask": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.11.attn.out_proj.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.11.attn.qkv_proj.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.11.ln_1.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.11.ln_1.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.11.mlp.fc_in.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.11.mlp.fc_in.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.11.mlp.fc_out.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.11.mlp.fc_out.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.12.attn.causal_mask": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.12.attn.out_proj.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.12.attn.qkv_proj.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.12.ln_1.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.12.ln_1.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.12.mlp.fc_in.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.12.mlp.fc_in.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.12.mlp.fc_out.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.12.mlp.fc_out.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.13.attn.causal_mask": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.13.attn.out_proj.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.13.attn.qkv_proj.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.13.ln_1.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.13.ln_1.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.13.mlp.fc_in.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.13.mlp.fc_in.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.13.mlp.fc_out.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.13.mlp.fc_out.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.14.attn.causal_mask": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.14.attn.out_proj.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.14.attn.qkv_proj.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.14.ln_1.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.14.ln_1.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.14.mlp.fc_in.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.14.mlp.fc_in.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.14.mlp.fc_out.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.14.mlp.fc_out.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.15.attn.causal_mask": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.15.attn.out_proj.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.15.attn.qkv_proj.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.15.ln_1.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.15.ln_1.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.15.mlp.fc_in.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.15.mlp.fc_in.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.15.mlp.fc_out.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.15.mlp.fc_out.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.16.attn.causal_mask": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.16.attn.out_proj.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.16.attn.qkv_proj.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.16.ln_1.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.16.ln_1.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.16.mlp.fc_in.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.16.mlp.fc_in.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.16.mlp.fc_out.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.16.mlp.fc_out.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.17.attn.causal_mask": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.17.attn.out_proj.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.17.attn.qkv_proj.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.17.ln_1.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.17.ln_1.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.17.mlp.fc_in.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.17.mlp.fc_in.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.17.mlp.fc_out.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.17.mlp.fc_out.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.18.attn.causal_mask": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.18.attn.out_proj.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.18.attn.qkv_proj.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.18.ln_1.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.18.ln_1.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.18.mlp.fc_in.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.18.mlp.fc_in.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.18.mlp.fc_out.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.18.mlp.fc_out.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.19.attn.causal_mask": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.19.attn.out_proj.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.19.attn.qkv_proj.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.19.ln_1.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.19.ln_1.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.19.mlp.fc_in.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.19.mlp.fc_in.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.19.mlp.fc_out.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.19.mlp.fc_out.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.2.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.2.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.2.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.2.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.2.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.2.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.2.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.2.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.2.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.20.attn.causal_mask": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.20.attn.out_proj.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.20.attn.qkv_proj.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.20.ln_1.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.20.ln_1.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.20.mlp.fc_in.bias": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.20.mlp.fc_in.weight": "pytorch_model-00002-of-00004.bin",
    "decoder.transformer.h.20.mlp.fc_out.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.20.mlp.fc_out.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.21.attn.causal_mask": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.21.attn.out_proj.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.21.attn.qkv_proj.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.21.ln_1.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.21.ln_1.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.21.mlp.fc_in.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.21.mlp.fc_in.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.21.mlp.fc_out.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.21.mlp.fc_out.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.22.attn.causal_mask": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.22.attn.out_proj.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.22.attn.qkv_proj.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.22.ln_1.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.22.ln_1.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.22.mlp.fc_in.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.22.mlp.fc_in.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.22.mlp.fc_out.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.22.mlp.fc_out.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.23.attn.causal_mask": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.23.attn.out_proj.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.23.attn.qkv_proj.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.23.ln_1.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.23.ln_1.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.23.mlp.fc_in.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.23.mlp.fc_in.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.23.mlp.fc_out.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.23.mlp.fc_out.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.24.attn.causal_mask": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.24.attn.out_proj.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.24.attn.qkv_proj.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.24.ln_1.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.24.ln_1.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.24.mlp.fc_in.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.24.mlp.fc_in.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.24.mlp.fc_out.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.24.mlp.fc_out.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.25.attn.causal_mask": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.25.attn.out_proj.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.25.attn.qkv_proj.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.25.ln_1.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.25.ln_1.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.25.mlp.fc_in.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.25.mlp.fc_in.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.25.mlp.fc_out.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.25.mlp.fc_out.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.26.attn.causal_mask": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.26.attn.out_proj.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.26.attn.qkv_proj.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.26.ln_1.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.26.ln_1.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.26.mlp.fc_in.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.26.mlp.fc_in.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.26.mlp.fc_out.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.26.mlp.fc_out.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.27.attn.causal_mask": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.27.attn.out_proj.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.27.attn.qkv_proj.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.27.ln_1.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.27.ln_1.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.27.mlp.fc_in.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.27.mlp.fc_in.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.27.mlp.fc_out.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.27.mlp.fc_out.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.28.attn.causal_mask": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.28.attn.out_proj.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.28.attn.qkv_proj.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.28.ln_1.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.28.ln_1.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.28.mlp.fc_in.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.28.mlp.fc_in.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.28.mlp.fc_out.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.28.mlp.fc_out.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.29.attn.causal_mask": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.29.attn.out_proj.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.29.attn.qkv_proj.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.29.ln_1.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.29.ln_1.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.29.mlp.fc_in.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.29.mlp.fc_in.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.29.mlp.fc_out.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.29.mlp.fc_out.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.3.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.3.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.3.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.3.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.3.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.3.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.3.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.3.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.3.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.30.attn.causal_mask": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.30.attn.out_proj.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.30.attn.qkv_proj.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.30.ln_1.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.30.ln_1.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.30.mlp.fc_in.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.30.mlp.fc_in.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.30.mlp.fc_out.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.30.mlp.fc_out.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.31.attn.causal_mask": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.31.attn.out_proj.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.31.attn.qkv_proj.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.31.ln_1.bias": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.31.ln_1.weight": "pytorch_model-00003-of-00004.bin",
    "decoder.transformer.h.31.mlp.fc_in.bias": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.31.mlp.fc_in.weight": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.31.mlp.fc_out.bias": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.31.mlp.fc_out.weight": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.32.attn.causal_mask": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.32.attn.out_proj.weight": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.32.attn.qkv_proj.weight": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.32.ln_1.bias": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.32.ln_1.weight": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.32.mlp.fc_in.bias": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.32.mlp.fc_in.weight": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.32.mlp.fc_out.bias": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.32.mlp.fc_out.weight": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.33.attn.causal_mask": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.33.attn.out_proj.weight": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.33.attn.qkv_proj.weight": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.33.crossattention.causal_mask": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.33.crossattention.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.33.crossattention.q_attn.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.33.crossattention.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.33.ln_1.bias": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.33.ln_1.weight": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.33.mlp.fc_in.bias": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.33.mlp.fc_in.weight": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.33.mlp.fc_out.bias": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.33.mlp.fc_out.weight": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.h.4.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.4.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.4.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.4.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.4.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.4.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.4.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.4.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.4.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.5.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.5.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.5.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.5.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.5.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.5.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.5.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.5.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.5.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.6.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.6.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.6.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.6.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.6.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.6.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.6.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.6.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.6.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.7.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.7.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.7.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.7.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.7.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.7.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.7.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.7.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.7.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.8.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.8.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.8.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.8.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.8.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.8.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.8.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.8.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.8.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.9.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.9.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.9.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.9.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.9.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.9.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.9.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.9.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.h.9.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "decoder.transformer.ln_f.bias": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.ln_f.weight": "pytorch_model-00004-of-00004.bin",
    "decoder.transformer.wte.weight": "pytorch_model-00001-of-00004.bin",
    "enc_to_dec_proj.bias": "pytorch_model-00001-of-00004.bin",
    "enc_to_dec_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.0.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "encoder.h.0.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.0.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.0.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.0.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.0.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.0.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.0.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.0.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.1.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "encoder.h.1.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.1.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.1.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.1.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.1.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.1.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.1.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.1.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.10.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "encoder.h.10.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.10.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.10.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.10.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.10.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.10.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.10.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.10.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.11.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "encoder.h.11.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.11.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.11.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.11.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.11.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.11.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.11.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.11.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.12.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "encoder.h.12.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.12.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.12.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.12.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.12.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.12.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.12.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.12.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.13.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "encoder.h.13.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.13.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.13.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.13.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.13.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.13.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.13.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.13.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.14.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "encoder.h.14.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.14.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.14.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.14.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.14.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.14.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.14.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.14.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.15.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "encoder.h.15.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.15.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.15.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.15.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.15.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.15.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.15.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.15.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.16.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "encoder.h.16.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.16.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.16.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.16.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.16.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.16.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.16.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.16.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.17.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "encoder.h.17.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.17.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.17.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.17.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.17.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.17.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.17.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.17.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.18.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "encoder.h.18.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.18.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.18.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.18.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.18.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.18.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.18.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.18.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.19.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "encoder.h.19.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.19.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.19.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.19.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.19.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.19.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.19.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.19.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.2.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "encoder.h.2.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.2.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.2.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.2.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.2.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.2.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.2.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.2.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.3.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "encoder.h.3.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.3.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.3.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.3.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.3.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.3.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.3.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.3.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.4.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "encoder.h.4.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.4.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.4.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.4.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.4.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.4.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.4.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.4.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.5.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "encoder.h.5.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.5.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.5.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.5.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.5.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.5.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.5.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.5.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.6.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "encoder.h.6.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.6.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.6.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.6.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.6.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.6.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.6.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.6.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.7.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "encoder.h.7.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.7.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.7.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.7.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.7.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.7.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.7.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.7.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.8.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "encoder.h.8.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.8.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.8.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.8.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.8.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.8.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.8.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.8.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.9.attn.causal_mask": "pytorch_model-00001-of-00004.bin",
    "encoder.h.9.attn.out_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.9.attn.qkv_proj.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.9.ln_1.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.9.ln_1.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.9.mlp.fc_in.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.9.mlp.fc_in.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.h.9.mlp.fc_out.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.h.9.mlp.fc_out.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.ln_f.bias": "pytorch_model-00001-of-00004.bin",
    "encoder.ln_f.weight": "pytorch_model-00001-of-00004.bin",
    "encoder.wte.weight": "pytorch_model-00001-of-00004.bin"
  }
}