Rename pytorch_model.bin.index.json to model.bin.index.json

#2
pytorch_model.bin.index.json → model.bin.index.json RENAMED
@@ -189,7 +189,7 @@
189
  "model.layers.22.mlp.down_proj.weight": "pytorch_model-00009-of-00031.bin",
190
  "model.layers.22.mlp.gate_proj.weight": "pytorch_model-00009-of-00031.bin",
191
  "model.layers.22.mlp.up_proj.weight": "pytorch_model-00009-of-00031.bin",
192
- "model.layers.22.post_attention_layernorm.weight": "pytorch_model-00009-of-00031.bin",
193
  "model.layers.22.self_attn.k_proj.bias": "pytorch_model-00009-of-00031.bin",
194
  "model.layers.22.self_attn.k_proj.weight": "pytorch_model-00009-of-00031.bin",
195
  "model.layers.22.self_attn.o_proj.weight": "pytorch_model-00009-of-00031.bin",
@@ -556,7 +556,7 @@
556
  "model.layers.5.self_attn.q_proj.bias": "pytorch_model-00003-of-00031.bin",
557
  "model.layers.5.self_attn.q_proj.weight": "pytorch_model-00003-of-00031.bin",
558
  "model.layers.5.self_attn.v_proj.bias": "pytorch_model-00003-of-00031.bin",
559
- "model.layers.5.self_attn.v_proj.weight": "pytorch_model-00003-of-00031.bin",
560
  "model.layers.50.input_layernorm.weight": "pytorch_model-00020-of-00031.bin",
561
  "model.layers.50.mlp.down_proj.weight": "pytorch_model-00020-of-00031.bin",
562
  "model.layers.50.mlp.gate_proj.weight": "pytorch_model-00019-of-00031.bin",
 
189
  "model.layers.22.mlp.down_proj.weight": "pytorch_model-00009-of-00031.bin",
190
  "model.layers.22.mlp.gate_proj.weight": "pytorch_model-00009-of-00031.bin",
191
  "model.layers.22.mlp.up_proj.weight": "pytorch_model-00009-of-00031.bin",
192
+ "model.layers.22.post_attention_layernorm.weight": "pytorch_model-00009-of-00031.bin",
193
  "model.layers.22.self_attn.k_proj.bias": "pytorch_model-00009-of-00031.bin",
194
  "model.layers.22.self_attn.k_proj.weight": "pytorch_model-00009-of-00031.bin",
195
  "model.layers.22.self_attn.o_proj.weight": "pytorch_model-00009-of-00031.bin",
 
556
  "model.layers.5.self_attn.q_proj.bias": "pytorch_model-00003-of-00031.bin",
557
  "model.layers.5.self_attn.q_proj.weight": "pytorch_model-00003-of-00031.bin",
558
  "model.layers.5.self_attn.v_proj.bias": "pytorch_model-00003-of-00031.bin",
559
+ "model.layers.5.self_attn.v_proj.weight": "pytorch_model-00003-of-00031.bin",
560
  "model.layers.50.input_layernorm.weight": "pytorch_model-00020-of-00031.bin",
561
  "model.layers.50.mlp.down_proj.weight": "pytorch_model-00020-of-00031.bin",
562
  "model.layers.50.mlp.gate_proj.weight": "pytorch_model-00019-of-00031.bin",