{
"metadata": {
"total_size": 5012344832
},
"weight_map": {
"model/embed_tokens/embedding": "flax_model-00001-of-00002.msgpack",
"model/layers/0/input_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/0/mlp/down_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/0/mlp/gate_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/0/mlp/up_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/0/post_attention_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/0/self_attn/k_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/0/self_attn/o_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/0/self_attn/q_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/0/self_attn/v_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/1/input_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/1/mlp/down_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/1/mlp/gate_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/1/mlp/up_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/1/post_attention_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/1/self_attn/k_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/1/self_attn/o_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/1/self_attn/q_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/1/self_attn/v_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/10/input_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/10/mlp/down_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/10/mlp/gate_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/10/mlp/up_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/10/post_attention_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/10/self_attn/k_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/10/self_attn/o_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/10/self_attn/q_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/10/self_attn/v_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/11/input_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/11/mlp/down_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/11/mlp/gate_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/11/mlp/up_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/11/post_attention_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/11/self_attn/k_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/11/self_attn/o_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/11/self_attn/q_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/11/self_attn/v_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/12/input_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/12/mlp/down_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/12/mlp/gate_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/12/mlp/up_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/12/post_attention_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/12/self_attn/k_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/12/self_attn/o_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/12/self_attn/q_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/12/self_attn/v_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/13/input_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/13/mlp/down_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/13/mlp/gate_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/13/mlp/up_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/13/post_attention_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/13/self_attn/k_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/13/self_attn/o_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/13/self_attn/q_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/13/self_attn/v_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/14/input_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/14/mlp/down_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/14/mlp/gate_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/14/mlp/up_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/14/post_attention_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/14/self_attn/k_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/14/self_attn/o_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/14/self_attn/q_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/14/self_attn/v_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/15/input_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/15/mlp/down_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/15/mlp/gate_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/15/mlp/up_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/15/post_attention_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/15/self_attn/k_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/15/self_attn/o_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/15/self_attn/q_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/15/self_attn/v_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/16/input_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/16/mlp/down_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/16/mlp/gate_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/16/mlp/up_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/16/post_attention_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/16/self_attn/k_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/16/self_attn/o_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/16/self_attn/q_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/16/self_attn/v_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/17/input_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/17/mlp/down_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/17/mlp/gate_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/17/mlp/up_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/17/post_attention_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/17/self_attn/k_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/17/self_attn/o_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/17/self_attn/q_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/17/self_attn/v_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/2/input_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/2/mlp/down_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/2/mlp/gate_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/2/mlp/up_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/2/post_attention_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/2/self_attn/k_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/2/self_attn/o_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/2/self_attn/q_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/2/self_attn/v_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/3/input_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/3/mlp/down_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/3/mlp/gate_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/3/mlp/up_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/3/post_attention_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/3/self_attn/k_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/3/self_attn/o_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/3/self_attn/q_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/3/self_attn/v_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/4/input_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/4/mlp/down_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/4/mlp/gate_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/4/mlp/up_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/4/post_attention_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/4/self_attn/k_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/4/self_attn/o_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/4/self_attn/q_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/4/self_attn/v_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/5/input_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/5/mlp/down_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/5/mlp/gate_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/5/mlp/up_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/5/post_attention_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/5/self_attn/k_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/5/self_attn/o_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/5/self_attn/q_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/5/self_attn/v_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/6/input_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/6/mlp/down_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/6/mlp/gate_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/6/mlp/up_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/6/post_attention_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/6/self_attn/k_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/6/self_attn/o_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/6/self_attn/q_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/6/self_attn/v_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/7/input_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/7/mlp/down_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/7/mlp/gate_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/7/mlp/up_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/7/post_attention_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/7/self_attn/k_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/7/self_attn/o_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/7/self_attn/q_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/7/self_attn/v_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/8/input_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/8/mlp/down_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/8/mlp/gate_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/8/mlp/up_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/8/post_attention_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/8/self_attn/k_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/8/self_attn/o_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/8/self_attn/q_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/8/self_attn/v_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/9/input_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/9/mlp/down_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/9/mlp/gate_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/9/mlp/up_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/9/post_attention_layernorm/weight": "flax_model-00001-of-00002.msgpack",
"model/layers/9/self_attn/k_proj/kernel": "flax_model-00001-of-00002.msgpack",
"model/layers/9/self_attn/o_proj/kernel": "flax_model-00002-of-00002.msgpack",
"model/layers/9/self_attn/q_proj/kernel": "flax_model-00002-of-00002.msgpack",
"model/layers/9/self_attn/v_proj/kernel": "flax_model-00002-of-00002.msgpack",
"model/norm/weight": "flax_model-00002-of-00002.msgpack"
}
}