|
{
  "metadata": {
    "total_size": 5678836480
  },
  "weight_map": {
    "lm_head.adapter_bias": "model-00001-of-00002.safetensors",
    "lm_head.adapter_scale": "model-00001-of-00002.safetensors",
    "lm_head.linear.bias": "model-00001-of-00002.safetensors",
    "lm_head.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.0.attn.attn.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.0.attn.attn.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.0.attn.attn.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.0.attn.attn.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.0.attn.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.0.attn.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.0.attn.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.0.attn.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.0.mlp.fc.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.0.mlp.fc.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.0.mlp.fc.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.0.mlp.fc.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.0.mlp.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.0.mlp.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.0.mlp.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.0.mlp.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.0.norm_1.bias": "model-00001-of-00002.safetensors",
    "transformer.h.0.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.h.1.attn.attn.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.1.attn.attn.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.1.attn.attn.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.1.attn.attn.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.1.attn.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.1.attn.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.1.attn.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.1.attn.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.1.mlp.fc.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.1.mlp.fc.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.1.mlp.fc.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.1.mlp.fc.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.1.mlp.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.1.mlp.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.1.mlp.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.1.mlp.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.1.norm_1.bias": "model-00001-of-00002.safetensors",
    "transformer.h.1.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.h.10.attn.adapter_wte.weight": "model-00001-of-00002.safetensors",
    "transformer.h.10.attn.attn.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.10.attn.attn.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.10.attn.attn.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.10.attn.attn.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.10.attn.gating_factor": "model-00001-of-00002.safetensors",
    "transformer.h.10.attn.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.10.attn.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.10.attn.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.10.attn.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.10.mlp.fc.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.10.mlp.fc.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.10.mlp.fc.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.10.mlp.fc.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.10.mlp.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.10.mlp.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.10.mlp.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.10.mlp.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.10.norm_1.bias": "model-00001-of-00002.safetensors",
    "transformer.h.10.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.h.11.attn.adapter_wte.weight": "model-00001-of-00002.safetensors",
    "transformer.h.11.attn.attn.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.11.attn.attn.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.11.attn.attn.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.11.attn.attn.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.11.attn.gating_factor": "model-00001-of-00002.safetensors",
    "transformer.h.11.attn.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.11.attn.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.11.attn.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.11.attn.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.11.mlp.fc.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.11.mlp.fc.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.11.mlp.fc.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.11.mlp.fc.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.11.mlp.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.11.mlp.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.11.mlp.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.11.mlp.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.11.norm_1.bias": "model-00001-of-00002.safetensors",
    "transformer.h.11.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.h.12.attn.adapter_wte.weight": "model-00001-of-00002.safetensors",
    "transformer.h.12.attn.attn.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.12.attn.attn.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.12.attn.attn.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.12.attn.attn.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.12.attn.gating_factor": "model-00001-of-00002.safetensors",
    "transformer.h.12.attn.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.12.attn.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.12.attn.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.12.attn.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.12.mlp.fc.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.12.mlp.fc.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.12.mlp.fc.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.12.mlp.fc.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.12.mlp.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.12.mlp.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.12.mlp.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.12.mlp.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.12.norm_1.bias": "model-00001-of-00002.safetensors",
    "transformer.h.12.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.h.13.attn.adapter_wte.weight": "model-00001-of-00002.safetensors",
    "transformer.h.13.attn.attn.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.13.attn.attn.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.13.attn.attn.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.13.attn.attn.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.13.attn.gating_factor": "model-00001-of-00002.safetensors",
    "transformer.h.13.attn.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.13.attn.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.13.attn.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.13.attn.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.13.mlp.fc.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.13.mlp.fc.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.13.mlp.fc.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.13.mlp.fc.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.13.mlp.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.13.mlp.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.13.mlp.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.13.mlp.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.13.norm_1.bias": "model-00001-of-00002.safetensors",
    "transformer.h.13.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.h.14.attn.adapter_wte.weight": "model-00001-of-00002.safetensors",
    "transformer.h.14.attn.attn.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.14.attn.attn.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.14.attn.attn.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.14.attn.attn.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.14.attn.gating_factor": "model-00001-of-00002.safetensors",
    "transformer.h.14.attn.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.14.attn.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.14.attn.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.14.attn.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.14.mlp.fc.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.14.mlp.fc.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.14.mlp.fc.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.14.mlp.fc.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.14.mlp.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.14.mlp.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.14.mlp.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.14.mlp.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.14.norm_1.bias": "model-00001-of-00002.safetensors",
    "transformer.h.14.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.h.15.attn.adapter_wte.weight": "model-00001-of-00002.safetensors",
    "transformer.h.15.attn.attn.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.15.attn.attn.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.15.attn.attn.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.15.attn.attn.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.15.attn.gating_factor": "model-00001-of-00002.safetensors",
    "transformer.h.15.attn.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.15.attn.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.15.attn.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.15.attn.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.15.mlp.fc.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.15.mlp.fc.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.15.mlp.fc.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.15.mlp.fc.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.15.mlp.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.15.mlp.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.15.mlp.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.15.mlp.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.15.norm_1.bias": "model-00001-of-00002.safetensors",
    "transformer.h.15.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.h.16.attn.adapter_wte.weight": "model-00001-of-00002.safetensors",
    "transformer.h.16.attn.attn.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.16.attn.attn.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.16.attn.attn.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.16.attn.attn.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.16.attn.gating_factor": "model-00001-of-00002.safetensors",
    "transformer.h.16.attn.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.16.attn.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.16.attn.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.16.attn.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.16.mlp.fc.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.16.mlp.fc.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.16.mlp.fc.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.16.mlp.fc.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.16.mlp.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.16.mlp.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.16.mlp.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.16.mlp.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.16.norm_1.bias": "model-00001-of-00002.safetensors",
    "transformer.h.16.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.h.17.attn.adapter_wte.weight": "model-00001-of-00002.safetensors",
    "transformer.h.17.attn.attn.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.17.attn.attn.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.17.attn.attn.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.17.attn.attn.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.17.attn.gating_factor": "model-00001-of-00002.safetensors",
    "transformer.h.17.attn.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.17.attn.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.17.attn.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.17.attn.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.17.mlp.fc.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.17.mlp.fc.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.17.mlp.fc.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.17.mlp.fc.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.17.mlp.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.17.mlp.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.17.mlp.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.17.mlp.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.17.norm_1.bias": "model-00001-of-00002.safetensors",
    "transformer.h.17.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.h.18.attn.adapter_wte.weight": "model-00001-of-00002.safetensors",
    "transformer.h.18.attn.attn.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.18.attn.attn.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.18.attn.attn.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.18.attn.attn.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.18.attn.gating_factor": "model-00001-of-00002.safetensors",
    "transformer.h.18.attn.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.18.attn.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.18.attn.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.18.attn.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.18.mlp.fc.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.18.mlp.fc.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.18.mlp.fc.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.18.mlp.fc.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.18.mlp.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.18.mlp.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.18.mlp.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.18.mlp.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.18.norm_1.bias": "model-00001-of-00002.safetensors",
    "transformer.h.18.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.h.19.attn.adapter_wte.weight": "model-00001-of-00002.safetensors",
    "transformer.h.19.attn.attn.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.19.attn.attn.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.19.attn.attn.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.19.attn.attn.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.19.attn.gating_factor": "model-00001-of-00002.safetensors",
    "transformer.h.19.attn.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.19.attn.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.19.attn.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.19.attn.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.19.mlp.fc.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.19.mlp.fc.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.19.mlp.fc.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.19.mlp.fc.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.19.mlp.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.19.mlp.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.19.mlp.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.19.mlp.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.19.norm_1.bias": "model-00001-of-00002.safetensors",
    "transformer.h.19.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.h.2.attn.adapter_wte.weight": "model-00001-of-00002.safetensors",
    "transformer.h.2.attn.attn.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.2.attn.attn.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.2.attn.attn.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.2.attn.attn.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.2.attn.gating_factor": "model-00001-of-00002.safetensors",
    "transformer.h.2.attn.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.2.attn.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.2.attn.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.2.attn.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.2.mlp.fc.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.2.mlp.fc.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.2.mlp.fc.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.2.mlp.fc.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.2.mlp.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.2.mlp.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.2.mlp.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.2.mlp.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.2.norm_1.bias": "model-00001-of-00002.safetensors",
    "transformer.h.2.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.h.20.attn.adapter_wte.weight": "model-00001-of-00002.safetensors",
    "transformer.h.20.attn.attn.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.20.attn.attn.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.20.attn.attn.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.20.attn.attn.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.20.attn.gating_factor": "model-00001-of-00002.safetensors",
    "transformer.h.20.attn.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.20.attn.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.20.attn.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.20.attn.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.20.mlp.fc.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.20.mlp.fc.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.20.mlp.fc.linear.bias": "model-00002-of-00002.safetensors",
    "transformer.h.20.mlp.fc.linear.weight": "model-00002-of-00002.safetensors",
    "transformer.h.20.mlp.proj.adapter_bias": "model-00002-of-00002.safetensors",
    "transformer.h.20.mlp.proj.adapter_scale": "model-00002-of-00002.safetensors",
    "transformer.h.20.mlp.proj.linear.bias": "model-00002-of-00002.safetensors",
    "transformer.h.20.mlp.proj.linear.weight": "model-00002-of-00002.safetensors",
    "transformer.h.20.norm_1.bias": "model-00001-of-00002.safetensors",
    "transformer.h.20.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.h.21.attn.adapter_wte.weight": "model-00002-of-00002.safetensors",
    "transformer.h.21.attn.attn.adapter_bias": "model-00002-of-00002.safetensors",
    "transformer.h.21.attn.attn.adapter_scale": "model-00002-of-00002.safetensors",
    "transformer.h.21.attn.attn.linear.bias": "model-00002-of-00002.safetensors",
    "transformer.h.21.attn.attn.linear.weight": "model-00002-of-00002.safetensors",
    "transformer.h.21.attn.gating_factor": "model-00002-of-00002.safetensors",
    "transformer.h.21.attn.proj.adapter_bias": "model-00002-of-00002.safetensors",
    "transformer.h.21.attn.proj.adapter_scale": "model-00002-of-00002.safetensors",
    "transformer.h.21.attn.proj.linear.bias": "model-00002-of-00002.safetensors",
    "transformer.h.21.attn.proj.linear.weight": "model-00002-of-00002.safetensors",
    "transformer.h.21.mlp.fc.adapter_bias": "model-00002-of-00002.safetensors",
    "transformer.h.21.mlp.fc.adapter_scale": "model-00002-of-00002.safetensors",
    "transformer.h.21.mlp.fc.linear.bias": "model-00002-of-00002.safetensors",
    "transformer.h.21.mlp.fc.linear.weight": "model-00002-of-00002.safetensors",
    "transformer.h.21.mlp.proj.adapter_bias": "model-00002-of-00002.safetensors",
    "transformer.h.21.mlp.proj.adapter_scale": "model-00002-of-00002.safetensors",
    "transformer.h.21.mlp.proj.linear.bias": "model-00002-of-00002.safetensors",
    "transformer.h.21.mlp.proj.linear.weight": "model-00002-of-00002.safetensors",
    "transformer.h.21.norm_1.bias": "model-00002-of-00002.safetensors",
    "transformer.h.21.norm_1.weight": "model-00002-of-00002.safetensors",
    "transformer.h.22.attn.adapter_wte.weight": "model-00002-of-00002.safetensors",
    "transformer.h.22.attn.attn.adapter_bias": "model-00002-of-00002.safetensors",
    "transformer.h.22.attn.attn.adapter_scale": "model-00002-of-00002.safetensors",
    "transformer.h.22.attn.attn.linear.bias": "model-00002-of-00002.safetensors",
    "transformer.h.22.attn.attn.linear.weight": "model-00002-of-00002.safetensors",
    "transformer.h.22.attn.gating_factor": "model-00002-of-00002.safetensors",
    "transformer.h.22.attn.proj.adapter_bias": "model-00002-of-00002.safetensors",
    "transformer.h.22.attn.proj.adapter_scale": "model-00002-of-00002.safetensors",
    "transformer.h.22.attn.proj.linear.bias": "model-00002-of-00002.safetensors",
    "transformer.h.22.attn.proj.linear.weight": "model-00002-of-00002.safetensors",
    "transformer.h.22.mlp.fc.adapter_bias": "model-00002-of-00002.safetensors",
    "transformer.h.22.mlp.fc.adapter_scale": "model-00002-of-00002.safetensors",
    "transformer.h.22.mlp.fc.linear.bias": "model-00002-of-00002.safetensors",
    "transformer.h.22.mlp.fc.linear.weight": "model-00002-of-00002.safetensors",
    "transformer.h.22.mlp.proj.adapter_bias": "model-00002-of-00002.safetensors",
    "transformer.h.22.mlp.proj.adapter_scale": "model-00002-of-00002.safetensors",
    "transformer.h.22.mlp.proj.linear.bias": "model-00002-of-00002.safetensors",
    "transformer.h.22.mlp.proj.linear.weight": "model-00002-of-00002.safetensors",
    "transformer.h.22.norm_1.bias": "model-00002-of-00002.safetensors",
    "transformer.h.22.norm_1.weight": "model-00002-of-00002.safetensors",
    "transformer.h.23.attn.adapter_wte.weight": "model-00002-of-00002.safetensors",
    "transformer.h.23.attn.attn.adapter_bias": "model-00002-of-00002.safetensors",
    "transformer.h.23.attn.attn.adapter_scale": "model-00002-of-00002.safetensors",
    "transformer.h.23.attn.attn.linear.bias": "model-00002-of-00002.safetensors",
    "transformer.h.23.attn.attn.linear.weight": "model-00002-of-00002.safetensors",
    "transformer.h.23.attn.gating_factor": "model-00002-of-00002.safetensors",
    "transformer.h.23.attn.proj.adapter_bias": "model-00002-of-00002.safetensors",
    "transformer.h.23.attn.proj.adapter_scale": "model-00002-of-00002.safetensors",
    "transformer.h.23.attn.proj.linear.bias": "model-00002-of-00002.safetensors",
    "transformer.h.23.attn.proj.linear.weight": "model-00002-of-00002.safetensors",
    "transformer.h.23.mlp.fc.adapter_bias": "model-00002-of-00002.safetensors",
    "transformer.h.23.mlp.fc.adapter_scale": "model-00002-of-00002.safetensors",
    "transformer.h.23.mlp.fc.linear.bias": "model-00002-of-00002.safetensors",
    "transformer.h.23.mlp.fc.linear.weight": "model-00002-of-00002.safetensors",
    "transformer.h.23.mlp.proj.adapter_bias": "model-00002-of-00002.safetensors",
    "transformer.h.23.mlp.proj.adapter_scale": "model-00002-of-00002.safetensors",
    "transformer.h.23.mlp.proj.linear.bias": "model-00002-of-00002.safetensors",
    "transformer.h.23.mlp.proj.linear.weight": "model-00002-of-00002.safetensors",
    "transformer.h.23.norm_1.bias": "model-00002-of-00002.safetensors",
    "transformer.h.23.norm_1.weight": "model-00002-of-00002.safetensors",
    "transformer.h.3.attn.adapter_wte.weight": "model-00001-of-00002.safetensors",
    "transformer.h.3.attn.attn.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.3.attn.attn.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.3.attn.attn.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.3.attn.attn.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.3.attn.gating_factor": "model-00001-of-00002.safetensors",
    "transformer.h.3.attn.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.3.attn.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.3.attn.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.3.attn.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.3.mlp.fc.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.3.mlp.fc.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.3.mlp.fc.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.3.mlp.fc.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.3.mlp.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.3.mlp.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.3.mlp.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.3.mlp.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.3.norm_1.bias": "model-00001-of-00002.safetensors",
    "transformer.h.3.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.h.4.attn.adapter_wte.weight": "model-00001-of-00002.safetensors",
    "transformer.h.4.attn.attn.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.4.attn.attn.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.4.attn.attn.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.4.attn.attn.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.4.attn.gating_factor": "model-00001-of-00002.safetensors",
    "transformer.h.4.attn.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.4.attn.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.4.attn.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.4.attn.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.4.mlp.fc.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.4.mlp.fc.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.4.mlp.fc.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.4.mlp.fc.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.4.mlp.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.4.mlp.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.4.mlp.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.4.mlp.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.4.norm_1.bias": "model-00001-of-00002.safetensors",
    "transformer.h.4.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.h.5.attn.adapter_wte.weight": "model-00001-of-00002.safetensors",
    "transformer.h.5.attn.attn.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.5.attn.attn.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.5.attn.attn.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.5.attn.attn.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.5.attn.gating_factor": "model-00001-of-00002.safetensors",
    "transformer.h.5.attn.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.5.attn.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.5.attn.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.5.attn.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.5.mlp.fc.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.5.mlp.fc.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.5.mlp.fc.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.5.mlp.fc.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.5.mlp.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.5.mlp.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.5.mlp.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.5.mlp.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.5.norm_1.bias": "model-00001-of-00002.safetensors",
    "transformer.h.5.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.h.6.attn.adapter_wte.weight": "model-00001-of-00002.safetensors",
    "transformer.h.6.attn.attn.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.6.attn.attn.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.6.attn.attn.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.6.attn.attn.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.6.attn.gating_factor": "model-00001-of-00002.safetensors",
    "transformer.h.6.attn.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.6.attn.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.6.attn.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.6.attn.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.6.mlp.fc.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.6.mlp.fc.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.6.mlp.fc.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.6.mlp.fc.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.6.mlp.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.6.mlp.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.6.mlp.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.6.mlp.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.6.norm_1.bias": "model-00001-of-00002.safetensors",
    "transformer.h.6.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.h.7.attn.adapter_wte.weight": "model-00001-of-00002.safetensors",
    "transformer.h.7.attn.attn.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.7.attn.attn.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.7.attn.attn.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.7.attn.attn.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.7.attn.gating_factor": "model-00001-of-00002.safetensors",
    "transformer.h.7.attn.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.7.attn.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.7.attn.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.7.attn.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.7.mlp.fc.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.7.mlp.fc.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.7.mlp.fc.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.7.mlp.fc.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.7.mlp.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.7.mlp.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.7.mlp.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.7.mlp.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.7.norm_1.bias": "model-00001-of-00002.safetensors",
    "transformer.h.7.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.h.8.attn.adapter_wte.weight": "model-00001-of-00002.safetensors",
    "transformer.h.8.attn.attn.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.8.attn.attn.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.8.attn.attn.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.8.attn.attn.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.8.attn.gating_factor": "model-00001-of-00002.safetensors",
    "transformer.h.8.attn.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.8.attn.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.8.attn.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.8.attn.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.8.mlp.fc.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.8.mlp.fc.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.8.mlp.fc.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.8.mlp.fc.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.8.mlp.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.8.mlp.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.8.mlp.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.8.mlp.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.8.norm_1.bias": "model-00001-of-00002.safetensors",
    "transformer.h.8.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.h.9.attn.adapter_wte.weight": "model-00001-of-00002.safetensors",
    "transformer.h.9.attn.attn.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.9.attn.attn.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.9.attn.attn.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.9.attn.attn.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.9.attn.gating_factor": "model-00001-of-00002.safetensors",
    "transformer.h.9.attn.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.9.attn.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.9.attn.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.9.attn.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.9.mlp.fc.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.9.mlp.fc.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.9.mlp.fc.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.9.mlp.fc.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.9.mlp.proj.adapter_bias": "model-00001-of-00002.safetensors",
    "transformer.h.9.mlp.proj.adapter_scale": "model-00001-of-00002.safetensors",
    "transformer.h.9.mlp.proj.linear.bias": "model-00001-of-00002.safetensors",
    "transformer.h.9.mlp.proj.linear.weight": "model-00001-of-00002.safetensors",
    "transformer.h.9.norm_1.bias": "model-00001-of-00002.safetensors",
    "transformer.h.9.norm_1.weight": "model-00001-of-00002.safetensors",
    "transformer.ln_f.bias": "model-00002-of-00002.safetensors",
    "transformer.ln_f.weight": "model-00002-of-00002.safetensors",
    "transformer.wte.weight": "model-00001-of-00002.safetensors"
  }
}
|
|