ehristoforu committed
Commit
3a5870b
1 Parent(s): 56585ae

Upload folder using huggingface_hub
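The commit message above refers to the Hugging Face Hub folder-upload API. As a minimal, hedged sketch (the repository id and local folder path below are placeholders, not values recorded in this commit), an upload that produces a commit like this typically looks like:

from huggingface_hub import upload_folder

# Sketch only: repo_id and folder_path are hypothetical placeholders.
upload_folder(
    repo_id="ehristoforu/some-moe-repo",   # hypothetical target repository
    folder_path="./merged-model",          # local folder produced by the merge
    commit_message="Upload folder using huggingface_hub",
)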

Files changed (49)
  1. .gitattributes +1 -0
  2. added_tokens.json +24 -0
  3. config.json +38 -0
  4. mergekit_moe_config.yml +21 -0
  5. merges.txt +0 -0
  6. model-00001-of-00039.safetensors +3 -0
  7. model-00002-of-00039.safetensors +3 -0
  8. model-00003-of-00039.safetensors +3 -0
  9. model-00004-of-00039.safetensors +3 -0
  10. model-00005-of-00039.safetensors +3 -0
  11. model-00006-of-00039.safetensors +3 -0
  12. model-00007-of-00039.safetensors +3 -0
  13. model-00008-of-00039.safetensors +3 -0
  14. model-00009-of-00039.safetensors +3 -0
  15. model-00010-of-00039.safetensors +3 -0
  16. model-00011-of-00039.safetensors +3 -0
  17. model-00012-of-00039.safetensors +3 -0
  18. model-00013-of-00039.safetensors +3 -0
  19. model-00014-of-00039.safetensors +3 -0
  20. model-00015-of-00039.safetensors +3 -0
  21. model-00016-of-00039.safetensors +3 -0
  22. model-00017-of-00039.safetensors +3 -0
  23. model-00018-of-00039.safetensors +3 -0
  24. model-00019-of-00039.safetensors +3 -0
  25. model-00020-of-00039.safetensors +3 -0
  26. model-00021-of-00039.safetensors +3 -0
  27. model-00022-of-00039.safetensors +3 -0
  28. model-00023-of-00039.safetensors +3 -0
  29. model-00024-of-00039.safetensors +3 -0
  30. model-00025-of-00039.safetensors +3 -0
  31. model-00026-of-00039.safetensors +3 -0
  32. model-00027-of-00039.safetensors +3 -0
  33. model-00028-of-00039.safetensors +3 -0
  34. model-00029-of-00039.safetensors +3 -0
  35. model-00030-of-00039.safetensors +3 -0
  36. model-00031-of-00039.safetensors +3 -0
  37. model-00032-of-00039.safetensors +3 -0
  38. model-00033-of-00039.safetensors +3 -0
  39. model-00034-of-00039.safetensors +3 -0
  40. model-00035-of-00039.safetensors +3 -0
  41. model-00036-of-00039.safetensors +3 -0
  42. model-00037-of-00039.safetensors +3 -0
  43. model-00038-of-00039.safetensors +3 -0
  44. model-00039-of-00039.safetensors +3 -0
  45. model.safetensors.index.json +1 -0
  46. special_tokens_map.json +25 -0
  47. tokenizer.json +3 -0
  48. tokenizer_config.json +211 -0
  49. vocab.json +0 -0
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
added_tokens.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "</tool_call>": 151658,
+   "<tool_call>": 151657,
+   "<|box_end|>": 151649,
+   "<|box_start|>": 151648,
+   "<|endoftext|>": 151643,
+   "<|file_sep|>": 151664,
+   "<|fim_middle|>": 151660,
+   "<|fim_pad|>": 151662,
+   "<|fim_prefix|>": 151659,
+   "<|fim_suffix|>": 151661,
+   "<|im_end|>": 151645,
+   "<|im_start|>": 151644,
+   "<|image_pad|>": 151655,
+   "<|object_ref_end|>": 151647,
+   "<|object_ref_start|>": 151646,
+   "<|quad_end|>": 151651,
+   "<|quad_start|>": 151650,
+   "<|repo_name|>": 151663,
+   "<|video_pad|>": 151656,
+   "<|vision_end|>": 151653,
+   "<|vision_pad|>": 151654,
+   "<|vision_start|>": 151652
+ }
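These entries extend the Qwen2.5 tokenizer with its fixed special tokens (chat markers, tool-call tags, FIM and vision placeholders). A minimal sketch of how they resolve through the transformers tokenizer, assuming the checkpoint from this repository is loaded (the repo id below is a placeholder):

from transformers import AutoTokenizer

# Placeholder id; any checkpoint containing the added_tokens.json above resolves the same way.
tokenizer = AutoTokenizer.from_pretrained("path/or/repo-id-of-this-model")

# Each added token maps to the fixed id listed in added_tokens.json.
print(tokenizer.convert_tokens_to_ids("<|im_start|>"))  # expected: 151644
print(tokenizer.convert_tokens_to_ids("<|im_end|>"))    # expected: 151645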
config.json ADDED
@@ -0,0 +1,38 @@
+ {
+   "_name_or_path": "huihui-ai/Qwen2.5-7B-Instruct-abliterated-v2",
+   "architectures": [
+     "Qwen2MoeForCausalLM"
+   ],
+   "attention_dropout": 0.0,
+   "bos_token_id": 151643,
+   "decoder_sparse_step": 1,
+   "eos_token_id": 151645,
+   "hidden_act": "silu",
+   "hidden_size": 3584,
+   "initializer_range": 0.02,
+   "intermediate_size": 18944,
+   "max_position_embeddings": 32768,
+   "max_window_layers": 28,
+   "mlp_only_layers": [],
+   "model_type": "qwen2_moe",
+   "moe_intermediate_size": 18944,
+   "norm_topk_prob": true,
+   "num_attention_heads": 28,
+   "num_experts": 4,
+   "num_experts_per_tok": 2,
+   "num_hidden_layers": 28,
+   "num_key_value_heads": 4,
+   "output_router_logits": false,
+   "rms_norm_eps": 1e-06,
+   "rope_scaling": null,
+   "rope_theta": 1000000.0,
+   "router_aux_loss_coef": 0.001,
+   "shared_expert_intermediate_size": 18944,
+   "sliding_window": null,
+   "tie_word_embeddings": false,
+   "torch_dtype": "bfloat16",
+   "transformers_version": "4.46.2",
+   "use_cache": true,
+   "use_sliding_window": false,
+   "vocab_size": 152064
+ }
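The config declares a Qwen2MoeForCausalLM with 28 layers, hidden size 3584, and 4 routed experts (2 active per token) plus a shared expert, stored in bfloat16. A hedged sketch of loading such a checkpoint with transformers (the repo id is a placeholder; the ~36.5 GB of bf16 weights listed in the index below set the memory requirement):

import torch
from transformers import AutoConfig, AutoModelForCausalLM

repo = "path/or/repo-id-of-this-model"  # placeholder, not taken from the commit

config = AutoConfig.from_pretrained(repo)
assert config.model_type == "qwen2_moe"
assert config.num_experts == 4 and config.num_experts_per_tok == 2

# Loads the 39 safetensors shards listed below; needs enough memory for ~36.5 GB of weights.
model = AutoModelForCausalLM.from_pretrained(
    repo,
    torch_dtype=torch.bfloat16,
    device_map="auto",
)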
mergekit_moe_config.yml ADDED
@@ -0,0 +1,21 @@
+
+ base_model: huihui-ai/Qwen2.5-7B-Instruct-abliterated-v2
+ architecture: qwen
+ gate_mode: hidden
+ dtype: bfloat16
+ experts:
+   - source_model: huihui-ai/Qwen2.5-7B-Instruct-abliterated-v2
+     positive_prompts: ["chat", "assistant", "chat history", "chat context", "writing", "text writing", "editing", "text editing", "multilingual"]
+   - source_model: Qwen/Qwen2.5-Math-1.5B-Instruct
+     positive_prompts: ["bio", "science", "biology", "natural sciences", "scientist", "math", "mathematician", "problem solving", "calculating", "logics"]
+   - source_model: Qwen/Qwen2.5-Coder-3B-Instruct
+     positive_prompts: ["code", "coding", "coder", "programming", "programmer", "code analysis", "code review", "code fix", "code improvement"]
+   - source_model: RefalMachine/ruadapt_qwen2.5_3B_ext_u48_instruct_v4
+     positive_prompts: ["russian chat", "russian chatting", "russian", "russian language", "russian text writing/editing"]
+ shared_experts:
+   - source_model: huihui-ai/Qwen2.5-7B-Instruct-abliterated-v2
+     positive_prompts: # required by Qwen MoE for "hidden" gate mode, otherwise not allowed
+       - "chat assistant"
+     # (optional, but recommended:)
+     residual_scale: 0.1 # downweight output from shared expert to prevent overcooking the model
+
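This is the mergekit MoE recipe behind the checkpoint: the abliterated 7B instruct model serves as the base and shared expert, four source models become routed experts, and gate_mode: hidden derives router weights from hidden-state activations on the positive_prompts. A hedged sketch of how such a config is typically run (exact CLI flags may differ by mergekit version; the output path is a placeholder):

import subprocess

# Hypothetical invocation: mergekit-moe reads the YAML above and writes the merged MoE checkpoint.
subprocess.run(
    ["mergekit-moe", "mergekit_moe_config.yml", "./merged-qwen-moe"],
    check=True,
)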
merges.txt ADDED
The diff for this file is too large to render.
 
model-00001-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:01845180ac3f6a4a8110cedd8c044d449b54237c22b7f878b4b35fa6763a4270
+ size 1089994896
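Each shard is stored as a Git LFS pointer: the three lines above record the pointer spec version, the SHA-256 of the actual payload, and its size in bytes. A small sketch for checking a downloaded shard against its pointer (the file name and expected digest are taken from the pointer above; nothing else is assumed):

import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file so multi-GB shards do not need to fit in memory."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

expected = "01845180ac3f6a4a8110cedd8c044d449b54237c22b7f878b4b35fa6763a4270"
print(sha256_of("model-00001-of-00039.safetensors") == expected)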
model-00002-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a72e3134368907253d887ac91b9b11e5e805310402e5b3c1fef960c73cb036dd
+ size 986981064
model-00003-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b6227af32a64401313e4d7745768f5a9bb6579cb966cb175a2072d6e74d18d67
+ size 882402648
model-00004-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d705a56272db1189bb910ae5ee18c1ef654033afbe3acf439c0706030a304dd4
+ size 973104472
model-00005-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7498d0064481094e95729f67dc5718998f59804d7fe2c30e63fa0bc6c6d55d1f
+ size 973104472
model-00006-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:51a9d4f1a5838b227d309e3365d4369e6692d8815f2e0ae362c76bf663bba1f7
+ size 896271968
model-00007-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:52f061f151b17e8b662a2690896dd4972e5d421fe8514b1292b0133efb1ec275
+ size 973104480
model-00008-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e5cbaa82595301dd5d1a37070a8c244b40ae778a0c0a2f439476d56bb67dc85d
+ size 973104472
model-00009-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:30f3c3c399dea7c05fcd8ac9c3ec70ebaa7066dbc30be81d93c0d2fc04aeafb3
+ size 955016784
model-00010-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ca5236dbc5d39d1342f653b8d59dfab7cb72aa8ea441a7296ec0e02b852fc37e
+ size 973104472
model-00011-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1d9521a80824cf7a3601ee5c61773d469db288921655c47041e9f855ca6156c1
+ size 986973792
model-00012-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:40ec5aa208de3e55e8315d775d7fc43e26593c99b625fcbe9295cf3084096169
+ size 882402648
model-00013-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:719b2ca8bb0b16088fcdfdf37a44dd5b16ddeab168859a122c52f6b36bac6104
+ size 973104472
model-00014-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8db3c6a6e97549065855996b3bdcb94e324da8e2589ce9f2be0c367beac22cd1
+ size 973104464
model-00015-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7aa077686944cc0780a760bb87ab9e94f50a3bb9d69c340ed3888f0f04ee6f57
+ size 896271984
model-00016-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:15be6d42d30b0bcca40293e05533ba9f36b5e8b27d9ccd2eb6f36d229e7dd1a0
+ size 973104504
model-00017-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:380bc14a2d18914bdc04103c8096cd303800bff52a6a6624320785ff8b011427
+ size 973104488
model-00018-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a0eff426f01292a2061a7858c4dfe3b0a694cdf5ddba5c0b93ea417a65d1fd14
+ size 955016800
model-00019-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5db3f002ad10efe000236c0f294b9149d089bd449c10526a9c74defadc68e04d
+ size 973104496
model-00020-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:55ffdc920a8bfcde4d684ef1d2a9d09808d8ca884e32b29915774a9eb2bb8360
+ size 986973808
model-00021-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e14ab70a489477cbfac968575bf57c1da5065786addbfdea2f87986320166bae
+ size 882402672
model-00022-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:26d079648038245c989f9e1f41a1cd849cce30b688ccf8d20120f6a02c598282
+ size 973104496
model-00023-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8275e26d050b6cb254daca41ff183112780d994c2a6dbecedd9d5890b165c019
+ size 973104488
model-00024-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fc2388e6839404a123c7b391ab92172314f10627d3424f21a3760b3524b3a6f9
+ size 896271984
model-00025-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9c05ca4eb8356319f4931783d9dd1dc2bf31c1d09b5fc1c5b831e20a60c83e64
+ size 973104504
model-00026-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:80c96ce35a3ab49b774670ff14cc1ba18e6393b8ad89dc3629b4eca38ff00dc6
+ size 973104488
model-00027-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4130ae55e0ebc0e48947a1fbee3b21b7cfc83c5fea14240a361ccd614f21b449
+ size 955016800
model-00028-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:47b4f2b173ad22b08d5eaf648046ebf24d3de86bc64372e2a917caeb0fcd5202
+ size 973104496
model-00029-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6089378edc3bb79b0895adf6da7b1473f78b4fc1b2d35209e97462010be64382
+ size 986973808
model-00030-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9740be09c08c079b95c6eb37324a65a3858564f81eb8fa733255ae0f1134db41
+ size 882402672
model-00031-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:809e2456a0a09d354b5a7ed9b7f05f39ec147e8eddf46ef98d724d8b29518c24
+ size 973104496
model-00032-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e080e130a9669dfc52ffff54e60663970294c7dad21c1eb0d7bd4e6cf524e7e5
+ size 973104488
model-00033-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fc489427b1d86b8684107ffbe5fd1db51c3c799342da022b3de8cc34f49473a9
+ size 896271984
model-00034-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:12244c404cfb7b207428e730d1cd30a198dfce268106f596af032e863ef22f58
+ size 973104504
model-00035-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a05b82a3ec378a65b5b7c745d4f127620624e658a49786c2c95bfbf6bf79355d
+ size 973104488
model-00036-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:41af5790b1cafaebfac677e0a9b67e19ca5efb29df38b11385ec78861831d942
+ size 955016800
model-00037-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:17bd14ad25809abe3d6521cf1b907e47db1dc4256653a86e2591a58a834506aa
+ size 973104472
model-00038-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:06006972c3be88e8a44fe21cfe2b0472b130780c781a741f8f90f1fe5ba3aae2
+ size 1089994880
model-00039-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9ffcb546461f507e216509a33014b7e02e213ecee6640267e85c80abf143732a
+ size 1009464
model.safetensors.index.json ADDED
@@ -0,0 +1 @@
+ {"metadata": {"mergekit_version": "0.0.5.1", "total_size": 36525669376}, "weight_map": {"model.embed_tokens.weight": "model-00001-of-00039.safetensors", "model.layers.0.input_layernorm.weight": "model-00002-of-00039.safetensors", "model.layers.0.mlp.experts.0.down_proj.weight": "model-00002-of-00039.safetensors", "model.layers.0.mlp.experts.1.down_proj.weight": "model-00002-of-00039.safetensors", "model.layers.0.mlp.experts.2.down_proj.weight": "model-00002-of-00039.safetensors", "model.layers.0.mlp.experts.3.down_proj.weight": "model-00002-of-00039.safetensors", "model.layers.0.mlp.shared_expert.down_proj.weight": "model-00002-of-00039.safetensors", "model.layers.0.mlp.experts.0.gate_proj.weight": "model-00002-of-00039.safetensors", "model.layers.0.mlp.experts.1.gate_proj.weight": "model-00002-of-00039.safetensors", "model.layers.0.mlp.experts.2.gate_proj.weight": "model-00002-of-00039.safetensors", "model.layers.0.mlp.experts.3.gate_proj.weight": "model-00002-of-00039.safetensors", "model.layers.0.mlp.shared_expert.gate_proj.weight": "model-00002-of-00039.safetensors", "model.layers.0.mlp.experts.0.up_proj.weight": "model-00002-of-00039.safetensors", "model.layers.0.mlp.experts.1.up_proj.weight": "model-00002-of-00039.safetensors", "model.layers.0.mlp.experts.2.up_proj.weight": "model-00002-of-00039.safetensors", "model.layers.0.mlp.experts.3.up_proj.weight": "model-00003-of-00039.safetensors", "model.layers.0.mlp.shared_expert.up_proj.weight": "model-00003-of-00039.safetensors", "model.layers.0.post_attention_layernorm.weight": "model-00003-of-00039.safetensors", "model.layers.0.self_attn.k_proj.bias": "model-00003-of-00039.safetensors", "model.layers.0.self_attn.k_proj.weight": "model-00003-of-00039.safetensors", "model.layers.0.self_attn.o_proj.weight": "model-00003-of-00039.safetensors", "model.layers.0.self_attn.q_proj.bias": "model-00003-of-00039.safetensors", "model.layers.0.self_attn.q_proj.weight": "model-00003-of-00039.safetensors", "model.layers.0.self_attn.v_proj.bias": "model-00003-of-00039.safetensors", "model.layers.0.self_attn.v_proj.weight": "model-00003-of-00039.safetensors", "model.layers.1.input_layernorm.weight": "model-00003-of-00039.safetensors", "model.layers.1.mlp.experts.0.down_proj.weight": "model-00003-of-00039.safetensors", "model.layers.1.mlp.experts.1.down_proj.weight": "model-00003-of-00039.safetensors", "model.layers.1.mlp.experts.2.down_proj.weight": "model-00003-of-00039.safetensors", "model.layers.1.mlp.experts.3.down_proj.weight": "model-00003-of-00039.safetensors", "model.layers.1.mlp.shared_expert.down_proj.weight": "model-00003-of-00039.safetensors", "model.layers.1.mlp.experts.0.gate_proj.weight": "model-00003-of-00039.safetensors", "model.layers.1.mlp.experts.1.gate_proj.weight": "model-00003-of-00039.safetensors", "model.layers.1.mlp.experts.2.gate_proj.weight": "model-00003-of-00039.safetensors", "model.layers.1.mlp.experts.3.gate_proj.weight": "model-00003-of-00039.safetensors", "model.layers.1.mlp.shared_expert.gate_proj.weight": "model-00004-of-00039.safetensors", "model.layers.1.mlp.experts.0.up_proj.weight": "model-00004-of-00039.safetensors", "model.layers.1.mlp.experts.1.up_proj.weight": "model-00004-of-00039.safetensors", "model.layers.1.mlp.experts.2.up_proj.weight": "model-00004-of-00039.safetensors", "model.layers.1.mlp.experts.3.up_proj.weight": "model-00004-of-00039.safetensors", "model.layers.1.mlp.shared_expert.up_proj.weight": "model-00004-of-00039.safetensors", "model.layers.1.post_attention_layernorm.weight": 
"model-00004-of-00039.safetensors", "model.layers.1.self_attn.k_proj.bias": "model-00004-of-00039.safetensors", "model.layers.1.self_attn.k_proj.weight": "model-00004-of-00039.safetensors", "model.layers.1.self_attn.o_proj.weight": "model-00004-of-00039.safetensors", "model.layers.1.self_attn.q_proj.bias": "model-00004-of-00039.safetensors", "model.layers.1.self_attn.q_proj.weight": "model-00004-of-00039.safetensors", "model.layers.1.self_attn.v_proj.bias": "model-00004-of-00039.safetensors", "model.layers.1.self_attn.v_proj.weight": "model-00004-of-00039.safetensors", "model.layers.2.input_layernorm.weight": "model-00004-of-00039.safetensors", "model.layers.2.mlp.experts.0.down_proj.weight": "model-00004-of-00039.safetensors", "model.layers.2.mlp.experts.1.down_proj.weight": "model-00004-of-00039.safetensors", "model.layers.2.mlp.experts.2.down_proj.weight": "model-00004-of-00039.safetensors", "model.layers.2.mlp.experts.3.down_proj.weight": "model-00004-of-00039.safetensors", "model.layers.2.mlp.shared_expert.down_proj.weight": "model-00004-of-00039.safetensors", "model.layers.2.mlp.experts.0.gate_proj.weight": "model-00005-of-00039.safetensors", "model.layers.2.mlp.experts.1.gate_proj.weight": "model-00005-of-00039.safetensors", "model.layers.2.mlp.experts.2.gate_proj.weight": "model-00005-of-00039.safetensors", "model.layers.2.mlp.experts.3.gate_proj.weight": "model-00005-of-00039.safetensors", "model.layers.2.mlp.shared_expert.gate_proj.weight": "model-00005-of-00039.safetensors", "model.layers.2.mlp.experts.0.up_proj.weight": "model-00005-of-00039.safetensors", "model.layers.2.mlp.experts.1.up_proj.weight": "model-00005-of-00039.safetensors", "model.layers.2.mlp.experts.2.up_proj.weight": "model-00005-of-00039.safetensors", "model.layers.2.mlp.experts.3.up_proj.weight": "model-00005-of-00039.safetensors", "model.layers.2.mlp.shared_expert.up_proj.weight": "model-00005-of-00039.safetensors", "model.layers.2.post_attention_layernorm.weight": "model-00005-of-00039.safetensors", "model.layers.2.self_attn.k_proj.bias": "model-00005-of-00039.safetensors", "model.layers.2.self_attn.k_proj.weight": "model-00005-of-00039.safetensors", "model.layers.2.self_attn.o_proj.weight": "model-00005-of-00039.safetensors", "model.layers.2.self_attn.q_proj.bias": "model-00005-of-00039.safetensors", "model.layers.2.self_attn.q_proj.weight": "model-00005-of-00039.safetensors", "model.layers.2.self_attn.v_proj.bias": "model-00005-of-00039.safetensors", "model.layers.2.self_attn.v_proj.weight": "model-00005-of-00039.safetensors", "model.layers.3.input_layernorm.weight": "model-00005-of-00039.safetensors", "model.layers.3.mlp.experts.0.down_proj.weight": "model-00005-of-00039.safetensors", "model.layers.3.mlp.experts.1.down_proj.weight": "model-00006-of-00039.safetensors", "model.layers.3.mlp.experts.2.down_proj.weight": "model-00006-of-00039.safetensors", "model.layers.3.mlp.experts.3.down_proj.weight": "model-00006-of-00039.safetensors", "model.layers.3.mlp.shared_expert.down_proj.weight": "model-00006-of-00039.safetensors", "model.layers.3.mlp.experts.0.gate_proj.weight": "model-00006-of-00039.safetensors", "model.layers.3.mlp.experts.1.gate_proj.weight": "model-00006-of-00039.safetensors", "model.layers.3.mlp.experts.2.gate_proj.weight": "model-00006-of-00039.safetensors", "model.layers.3.mlp.experts.3.gate_proj.weight": "model-00006-of-00039.safetensors", "model.layers.3.mlp.shared_expert.gate_proj.weight": "model-00006-of-00039.safetensors", "model.layers.3.mlp.experts.0.up_proj.weight": 
"model-00006-of-00039.safetensors", "model.layers.3.mlp.experts.1.up_proj.weight": "model-00006-of-00039.safetensors", "model.layers.3.mlp.experts.2.up_proj.weight": "model-00006-of-00039.safetensors", "model.layers.3.mlp.experts.3.up_proj.weight": "model-00006-of-00039.safetensors", "model.layers.3.mlp.shared_expert.up_proj.weight": "model-00007-of-00039.safetensors", "model.layers.3.post_attention_layernorm.weight": "model-00007-of-00039.safetensors", "model.layers.3.self_attn.k_proj.bias": "model-00007-of-00039.safetensors", "model.layers.3.self_attn.k_proj.weight": "model-00007-of-00039.safetensors", "model.layers.3.self_attn.o_proj.weight": "model-00007-of-00039.safetensors", "model.layers.3.self_attn.q_proj.bias": "model-00007-of-00039.safetensors", "model.layers.3.self_attn.q_proj.weight": "model-00007-of-00039.safetensors", "model.layers.3.self_attn.v_proj.bias": "model-00007-of-00039.safetensors", "model.layers.3.self_attn.v_proj.weight": "model-00007-of-00039.safetensors", "model.layers.4.input_layernorm.weight": "model-00007-of-00039.safetensors", "model.layers.4.mlp.experts.0.down_proj.weight": "model-00007-of-00039.safetensors", "model.layers.4.mlp.experts.1.down_proj.weight": "model-00007-of-00039.safetensors", "model.layers.4.mlp.experts.2.down_proj.weight": "model-00007-of-00039.safetensors", "model.layers.4.mlp.experts.3.down_proj.weight": "model-00007-of-00039.safetensors", "model.layers.4.mlp.shared_expert.down_proj.weight": "model-00007-of-00039.safetensors", "model.layers.4.mlp.experts.0.gate_proj.weight": "model-00007-of-00039.safetensors", "model.layers.4.mlp.experts.1.gate_proj.weight": "model-00007-of-00039.safetensors", "model.layers.4.mlp.experts.2.gate_proj.weight": "model-00007-of-00039.safetensors", "model.layers.4.mlp.experts.3.gate_proj.weight": "model-00007-of-00039.safetensors", "model.layers.4.mlp.shared_expert.gate_proj.weight": "model-00007-of-00039.safetensors", "model.layers.4.mlp.experts.0.up_proj.weight": "model-00008-of-00039.safetensors", "model.layers.4.mlp.experts.1.up_proj.weight": "model-00008-of-00039.safetensors", "model.layers.4.mlp.experts.2.up_proj.weight": "model-00008-of-00039.safetensors", "model.layers.4.mlp.experts.3.up_proj.weight": "model-00008-of-00039.safetensors", "model.layers.4.mlp.shared_expert.up_proj.weight": "model-00008-of-00039.safetensors", "model.layers.4.post_attention_layernorm.weight": "model-00008-of-00039.safetensors", "model.layers.4.self_attn.k_proj.bias": "model-00008-of-00039.safetensors", "model.layers.4.self_attn.k_proj.weight": "model-00008-of-00039.safetensors", "model.layers.4.self_attn.o_proj.weight": "model-00008-of-00039.safetensors", "model.layers.4.self_attn.q_proj.bias": "model-00008-of-00039.safetensors", "model.layers.4.self_attn.q_proj.weight": "model-00008-of-00039.safetensors", "model.layers.4.self_attn.v_proj.bias": "model-00008-of-00039.safetensors", "model.layers.4.self_attn.v_proj.weight": "model-00008-of-00039.safetensors", "model.layers.5.input_layernorm.weight": "model-00008-of-00039.safetensors", "model.layers.5.mlp.experts.0.down_proj.weight": "model-00008-of-00039.safetensors", "model.layers.5.mlp.experts.1.down_proj.weight": "model-00008-of-00039.safetensors", "model.layers.5.mlp.experts.2.down_proj.weight": "model-00008-of-00039.safetensors", "model.layers.5.mlp.experts.3.down_proj.weight": "model-00008-of-00039.safetensors", "model.layers.5.mlp.shared_expert.down_proj.weight": "model-00008-of-00039.safetensors", "model.layers.5.mlp.experts.0.gate_proj.weight": 
"model-00008-of-00039.safetensors", "model.layers.5.mlp.experts.1.gate_proj.weight": "model-00009-of-00039.safetensors", "model.layers.5.mlp.experts.2.gate_proj.weight": "model-00009-of-00039.safetensors", "model.layers.5.mlp.experts.3.gate_proj.weight": "model-00009-of-00039.safetensors", "model.layers.5.mlp.shared_expert.gate_proj.weight": "model-00009-of-00039.safetensors", "model.layers.5.mlp.experts.0.up_proj.weight": "model-00009-of-00039.safetensors", "model.layers.5.mlp.experts.1.up_proj.weight": "model-00009-of-00039.safetensors", "model.layers.5.mlp.experts.2.up_proj.weight": "model-00009-of-00039.safetensors", "model.layers.5.mlp.experts.3.up_proj.weight": "model-00009-of-00039.safetensors", "model.layers.5.mlp.shared_expert.up_proj.weight": "model-00009-of-00039.safetensors", "model.layers.5.post_attention_layernorm.weight": "model-00009-of-00039.safetensors", "model.layers.5.self_attn.k_proj.bias": "model-00009-of-00039.safetensors", "model.layers.5.self_attn.k_proj.weight": "model-00009-of-00039.safetensors", "model.layers.5.self_attn.o_proj.weight": "model-00009-of-00039.safetensors", "model.layers.5.self_attn.q_proj.bias": "model-00009-of-00039.safetensors", "model.layers.5.self_attn.q_proj.weight": "model-00009-of-00039.safetensors", "model.layers.5.self_attn.v_proj.bias": "model-00009-of-00039.safetensors", "model.layers.5.self_attn.v_proj.weight": "model-00009-of-00039.safetensors", "model.layers.6.input_layernorm.weight": "model-00009-of-00039.safetensors", "model.layers.6.mlp.experts.0.down_proj.weight": "model-00009-of-00039.safetensors", "model.layers.6.mlp.experts.1.down_proj.weight": "model-00009-of-00039.safetensors", "model.layers.6.mlp.experts.2.down_proj.weight": "model-00009-of-00039.safetensors", "model.layers.6.mlp.experts.3.down_proj.weight": "model-00009-of-00039.safetensors", "model.layers.6.mlp.shared_expert.down_proj.weight": "model-00010-of-00039.safetensors", "model.layers.6.mlp.experts.0.gate_proj.weight": "model-00010-of-00039.safetensors", "model.layers.6.mlp.experts.1.gate_proj.weight": "model-00010-of-00039.safetensors", "model.layers.6.mlp.experts.2.gate_proj.weight": "model-00010-of-00039.safetensors", "model.layers.6.mlp.experts.3.gate_proj.weight": "model-00010-of-00039.safetensors", "model.layers.6.mlp.shared_expert.gate_proj.weight": "model-00010-of-00039.safetensors", "model.layers.6.mlp.experts.0.up_proj.weight": "model-00010-of-00039.safetensors", "model.layers.6.mlp.experts.1.up_proj.weight": "model-00010-of-00039.safetensors", "model.layers.6.mlp.experts.2.up_proj.weight": "model-00010-of-00039.safetensors", "model.layers.6.mlp.experts.3.up_proj.weight": "model-00010-of-00039.safetensors", "model.layers.6.mlp.shared_expert.up_proj.weight": "model-00010-of-00039.safetensors", "model.layers.6.post_attention_layernorm.weight": "model-00010-of-00039.safetensors", "model.layers.6.self_attn.k_proj.bias": "model-00010-of-00039.safetensors", "model.layers.6.self_attn.k_proj.weight": "model-00010-of-00039.safetensors", "model.layers.6.self_attn.o_proj.weight": "model-00010-of-00039.safetensors", "model.layers.6.self_attn.q_proj.bias": "model-00010-of-00039.safetensors", "model.layers.6.self_attn.q_proj.weight": "model-00010-of-00039.safetensors", "model.layers.6.self_attn.v_proj.bias": "model-00010-of-00039.safetensors", "model.layers.6.self_attn.v_proj.weight": "model-00010-of-00039.safetensors", "model.layers.7.input_layernorm.weight": "model-00010-of-00039.safetensors", "model.layers.7.mlp.experts.0.down_proj.weight": 
"model-00011-of-00039.safetensors", "model.layers.7.mlp.experts.1.down_proj.weight": "model-00011-of-00039.safetensors", "model.layers.7.mlp.experts.2.down_proj.weight": "model-00011-of-00039.safetensors", "model.layers.7.mlp.experts.3.down_proj.weight": "model-00011-of-00039.safetensors", "model.layers.7.mlp.shared_expert.down_proj.weight": "model-00011-of-00039.safetensors", "model.layers.7.mlp.experts.0.gate_proj.weight": "model-00011-of-00039.safetensors", "model.layers.7.mlp.experts.1.gate_proj.weight": "model-00011-of-00039.safetensors", "model.layers.7.mlp.experts.2.gate_proj.weight": "model-00011-of-00039.safetensors", "model.layers.7.mlp.experts.3.gate_proj.weight": "model-00011-of-00039.safetensors", "model.layers.7.mlp.shared_expert.gate_proj.weight": "model-00011-of-00039.safetensors", "model.layers.7.mlp.experts.0.up_proj.weight": "model-00011-of-00039.safetensors", "model.layers.7.mlp.experts.1.up_proj.weight": "model-00011-of-00039.safetensors", "model.layers.7.mlp.experts.2.up_proj.weight": "model-00011-of-00039.safetensors", "model.layers.7.mlp.experts.3.up_proj.weight": "model-00012-of-00039.safetensors", "model.layers.7.mlp.shared_expert.up_proj.weight": "model-00012-of-00039.safetensors", "model.layers.7.post_attention_layernorm.weight": "model-00012-of-00039.safetensors", "model.layers.7.self_attn.k_proj.bias": "model-00012-of-00039.safetensors", "model.layers.7.self_attn.k_proj.weight": "model-00012-of-00039.safetensors", "model.layers.7.self_attn.o_proj.weight": "model-00012-of-00039.safetensors", "model.layers.7.self_attn.q_proj.bias": "model-00012-of-00039.safetensors", "model.layers.7.self_attn.q_proj.weight": "model-00012-of-00039.safetensors", "model.layers.7.self_attn.v_proj.bias": "model-00012-of-00039.safetensors", "model.layers.7.self_attn.v_proj.weight": "model-00012-of-00039.safetensors", "model.layers.8.input_layernorm.weight": "model-00012-of-00039.safetensors", "model.layers.8.mlp.experts.0.down_proj.weight": "model-00012-of-00039.safetensors", "model.layers.8.mlp.experts.1.down_proj.weight": "model-00012-of-00039.safetensors", "model.layers.8.mlp.experts.2.down_proj.weight": "model-00012-of-00039.safetensors", "model.layers.8.mlp.experts.3.down_proj.weight": "model-00012-of-00039.safetensors", "model.layers.8.mlp.shared_expert.down_proj.weight": "model-00012-of-00039.safetensors", "model.layers.8.mlp.experts.0.gate_proj.weight": "model-00012-of-00039.safetensors", "model.layers.8.mlp.experts.1.gate_proj.weight": "model-00012-of-00039.safetensors", "model.layers.8.mlp.experts.2.gate_proj.weight": "model-00012-of-00039.safetensors", "model.layers.8.mlp.experts.3.gate_proj.weight": "model-00012-of-00039.safetensors", "model.layers.8.mlp.shared_expert.gate_proj.weight": "model-00013-of-00039.safetensors", "model.layers.8.mlp.experts.0.up_proj.weight": "model-00013-of-00039.safetensors", "model.layers.8.mlp.experts.1.up_proj.weight": "model-00013-of-00039.safetensors", "model.layers.8.mlp.experts.2.up_proj.weight": "model-00013-of-00039.safetensors", "model.layers.8.mlp.experts.3.up_proj.weight": "model-00013-of-00039.safetensors", "model.layers.8.mlp.shared_expert.up_proj.weight": "model-00013-of-00039.safetensors", "model.layers.8.post_attention_layernorm.weight": "model-00013-of-00039.safetensors", "model.layers.8.self_attn.k_proj.bias": "model-00013-of-00039.safetensors", "model.layers.8.self_attn.k_proj.weight": "model-00013-of-00039.safetensors", "model.layers.8.self_attn.o_proj.weight": "model-00013-of-00039.safetensors", 
"model.layers.8.self_attn.q_proj.bias": "model-00013-of-00039.safetensors", "model.layers.8.self_attn.q_proj.weight": "model-00013-of-00039.safetensors", "model.layers.8.self_attn.v_proj.bias": "model-00013-of-00039.safetensors", "model.layers.8.self_attn.v_proj.weight": "model-00013-of-00039.safetensors", "model.layers.9.input_layernorm.weight": "model-00013-of-00039.safetensors", "model.layers.9.mlp.experts.0.down_proj.weight": "model-00013-of-00039.safetensors", "model.layers.9.mlp.experts.1.down_proj.weight": "model-00013-of-00039.safetensors", "model.layers.9.mlp.experts.2.down_proj.weight": "model-00013-of-00039.safetensors", "model.layers.9.mlp.experts.3.down_proj.weight": "model-00013-of-00039.safetensors", "model.layers.9.mlp.shared_expert.down_proj.weight": "model-00013-of-00039.safetensors", "model.layers.9.mlp.experts.0.gate_proj.weight": "model-00014-of-00039.safetensors", "model.layers.9.mlp.experts.1.gate_proj.weight": "model-00014-of-00039.safetensors", "model.layers.9.mlp.experts.2.gate_proj.weight": "model-00014-of-00039.safetensors", "model.layers.9.mlp.experts.3.gate_proj.weight": "model-00014-of-00039.safetensors", "model.layers.9.mlp.shared_expert.gate_proj.weight": "model-00014-of-00039.safetensors", "model.layers.9.mlp.experts.0.up_proj.weight": "model-00014-of-00039.safetensors", "model.layers.9.mlp.experts.1.up_proj.weight": "model-00014-of-00039.safetensors", "model.layers.9.mlp.experts.2.up_proj.weight": "model-00014-of-00039.safetensors", "model.layers.9.mlp.experts.3.up_proj.weight": "model-00014-of-00039.safetensors", "model.layers.9.mlp.shared_expert.up_proj.weight": "model-00014-of-00039.safetensors", "model.layers.9.post_attention_layernorm.weight": "model-00014-of-00039.safetensors", "model.layers.9.self_attn.k_proj.bias": "model-00014-of-00039.safetensors", "model.layers.9.self_attn.k_proj.weight": "model-00014-of-00039.safetensors", "model.layers.9.self_attn.o_proj.weight": "model-00014-of-00039.safetensors", "model.layers.9.self_attn.q_proj.bias": "model-00014-of-00039.safetensors", "model.layers.9.self_attn.q_proj.weight": "model-00014-of-00039.safetensors", "model.layers.9.self_attn.v_proj.bias": "model-00014-of-00039.safetensors", "model.layers.9.self_attn.v_proj.weight": "model-00014-of-00039.safetensors", "model.layers.10.input_layernorm.weight": "model-00014-of-00039.safetensors", "model.layers.10.mlp.experts.0.down_proj.weight": "model-00014-of-00039.safetensors", "model.layers.10.mlp.experts.1.down_proj.weight": "model-00015-of-00039.safetensors", "model.layers.10.mlp.experts.2.down_proj.weight": "model-00015-of-00039.safetensors", "model.layers.10.mlp.experts.3.down_proj.weight": "model-00015-of-00039.safetensors", "model.layers.10.mlp.shared_expert.down_proj.weight": "model-00015-of-00039.safetensors", "model.layers.10.mlp.experts.0.gate_proj.weight": "model-00015-of-00039.safetensors", "model.layers.10.mlp.experts.1.gate_proj.weight": "model-00015-of-00039.safetensors", "model.layers.10.mlp.experts.2.gate_proj.weight": "model-00015-of-00039.safetensors", "model.layers.10.mlp.experts.3.gate_proj.weight": "model-00015-of-00039.safetensors", "model.layers.10.mlp.shared_expert.gate_proj.weight": "model-00015-of-00039.safetensors", "model.layers.10.mlp.experts.0.up_proj.weight": "model-00015-of-00039.safetensors", "model.layers.10.mlp.experts.1.up_proj.weight": "model-00015-of-00039.safetensors", "model.layers.10.mlp.experts.2.up_proj.weight": "model-00015-of-00039.safetensors", "model.layers.10.mlp.experts.3.up_proj.weight": 
"model-00015-of-00039.safetensors", "model.layers.10.mlp.shared_expert.up_proj.weight": "model-00016-of-00039.safetensors", "model.layers.10.post_attention_layernorm.weight": "model-00016-of-00039.safetensors", "model.layers.10.self_attn.k_proj.bias": "model-00016-of-00039.safetensors", "model.layers.10.self_attn.k_proj.weight": "model-00016-of-00039.safetensors", "model.layers.10.self_attn.o_proj.weight": "model-00016-of-00039.safetensors", "model.layers.10.self_attn.q_proj.bias": "model-00016-of-00039.safetensors", "model.layers.10.self_attn.q_proj.weight": "model-00016-of-00039.safetensors", "model.layers.10.self_attn.v_proj.bias": "model-00016-of-00039.safetensors", "model.layers.10.self_attn.v_proj.weight": "model-00016-of-00039.safetensors", "model.layers.11.input_layernorm.weight": "model-00016-of-00039.safetensors", "model.layers.11.mlp.experts.0.down_proj.weight": "model-00016-of-00039.safetensors", "model.layers.11.mlp.experts.1.down_proj.weight": "model-00016-of-00039.safetensors", "model.layers.11.mlp.experts.2.down_proj.weight": "model-00016-of-00039.safetensors", "model.layers.11.mlp.experts.3.down_proj.weight": "model-00016-of-00039.safetensors", "model.layers.11.mlp.shared_expert.down_proj.weight": "model-00016-of-00039.safetensors", "model.layers.11.mlp.experts.0.gate_proj.weight": "model-00016-of-00039.safetensors", "model.layers.11.mlp.experts.1.gate_proj.weight": "model-00016-of-00039.safetensors", "model.layers.11.mlp.experts.2.gate_proj.weight": "model-00016-of-00039.safetensors", "model.layers.11.mlp.experts.3.gate_proj.weight": "model-00016-of-00039.safetensors", "model.layers.11.mlp.shared_expert.gate_proj.weight": "model-00016-of-00039.safetensors", "model.layers.11.mlp.experts.0.up_proj.weight": "model-00017-of-00039.safetensors", "model.layers.11.mlp.experts.1.up_proj.weight": "model-00017-of-00039.safetensors", "model.layers.11.mlp.experts.2.up_proj.weight": "model-00017-of-00039.safetensors", "model.layers.11.mlp.experts.3.up_proj.weight": "model-00017-of-00039.safetensors", "model.layers.11.mlp.shared_expert.up_proj.weight": "model-00017-of-00039.safetensors", "model.layers.11.post_attention_layernorm.weight": "model-00017-of-00039.safetensors", "model.layers.11.self_attn.k_proj.bias": "model-00017-of-00039.safetensors", "model.layers.11.self_attn.k_proj.weight": "model-00017-of-00039.safetensors", "model.layers.11.self_attn.o_proj.weight": "model-00017-of-00039.safetensors", "model.layers.11.self_attn.q_proj.bias": "model-00017-of-00039.safetensors", "model.layers.11.self_attn.q_proj.weight": "model-00017-of-00039.safetensors", "model.layers.11.self_attn.v_proj.bias": "model-00017-of-00039.safetensors", "model.layers.11.self_attn.v_proj.weight": "model-00017-of-00039.safetensors", "model.layers.12.input_layernorm.weight": "model-00017-of-00039.safetensors", "model.layers.12.mlp.experts.0.down_proj.weight": "model-00017-of-00039.safetensors", "model.layers.12.mlp.experts.1.down_proj.weight": "model-00017-of-00039.safetensors", "model.layers.12.mlp.experts.2.down_proj.weight": "model-00017-of-00039.safetensors", "model.layers.12.mlp.experts.3.down_proj.weight": "model-00017-of-00039.safetensors", "model.layers.12.mlp.shared_expert.down_proj.weight": "model-00017-of-00039.safetensors", "model.layers.12.mlp.experts.0.gate_proj.weight": "model-00017-of-00039.safetensors", "model.layers.12.mlp.experts.1.gate_proj.weight": "model-00018-of-00039.safetensors", "model.layers.12.mlp.experts.2.gate_proj.weight": "model-00018-of-00039.safetensors", 
"model.layers.12.mlp.experts.3.gate_proj.weight": "model-00018-of-00039.safetensors", "model.layers.12.mlp.shared_expert.gate_proj.weight": "model-00018-of-00039.safetensors", "model.layers.12.mlp.experts.0.up_proj.weight": "model-00018-of-00039.safetensors", "model.layers.12.mlp.experts.1.up_proj.weight": "model-00018-of-00039.safetensors", "model.layers.12.mlp.experts.2.up_proj.weight": "model-00018-of-00039.safetensors", "model.layers.12.mlp.experts.3.up_proj.weight": "model-00018-of-00039.safetensors", "model.layers.12.mlp.shared_expert.up_proj.weight": "model-00018-of-00039.safetensors", "model.layers.12.post_attention_layernorm.weight": "model-00018-of-00039.safetensors", "model.layers.12.self_attn.k_proj.bias": "model-00018-of-00039.safetensors", "model.layers.12.self_attn.k_proj.weight": "model-00018-of-00039.safetensors", "model.layers.12.self_attn.o_proj.weight": "model-00018-of-00039.safetensors", "model.layers.12.self_attn.q_proj.bias": "model-00018-of-00039.safetensors", "model.layers.12.self_attn.q_proj.weight": "model-00018-of-00039.safetensors", "model.layers.12.self_attn.v_proj.bias": "model-00018-of-00039.safetensors", "model.layers.12.self_attn.v_proj.weight": "model-00018-of-00039.safetensors", "model.layers.13.input_layernorm.weight": "model-00018-of-00039.safetensors", "model.layers.13.mlp.experts.0.down_proj.weight": "model-00018-of-00039.safetensors", "model.layers.13.mlp.experts.1.down_proj.weight": "model-00018-of-00039.safetensors", "model.layers.13.mlp.experts.2.down_proj.weight": "model-00018-of-00039.safetensors", "model.layers.13.mlp.experts.3.down_proj.weight": "model-00018-of-00039.safetensors", "model.layers.13.mlp.shared_expert.down_proj.weight": "model-00019-of-00039.safetensors", "model.layers.13.mlp.experts.0.gate_proj.weight": "model-00019-of-00039.safetensors", "model.layers.13.mlp.experts.1.gate_proj.weight": "model-00019-of-00039.safetensors", "model.layers.13.mlp.experts.2.gate_proj.weight": "model-00019-of-00039.safetensors", "model.layers.13.mlp.experts.3.gate_proj.weight": "model-00019-of-00039.safetensors", "model.layers.13.mlp.shared_expert.gate_proj.weight": "model-00019-of-00039.safetensors", "model.layers.13.mlp.experts.0.up_proj.weight": "model-00019-of-00039.safetensors", "model.layers.13.mlp.experts.1.up_proj.weight": "model-00019-of-00039.safetensors", "model.layers.13.mlp.experts.2.up_proj.weight": "model-00019-of-00039.safetensors", "model.layers.13.mlp.experts.3.up_proj.weight": "model-00019-of-00039.safetensors", "model.layers.13.mlp.shared_expert.up_proj.weight": "model-00019-of-00039.safetensors", "model.layers.13.post_attention_layernorm.weight": "model-00019-of-00039.safetensors", "model.layers.13.self_attn.k_proj.bias": "model-00019-of-00039.safetensors", "model.layers.13.self_attn.k_proj.weight": "model-00019-of-00039.safetensors", "model.layers.13.self_attn.o_proj.weight": "model-00019-of-00039.safetensors", "model.layers.13.self_attn.q_proj.bias": "model-00019-of-00039.safetensors", "model.layers.13.self_attn.q_proj.weight": "model-00019-of-00039.safetensors", "model.layers.13.self_attn.v_proj.bias": "model-00019-of-00039.safetensors", "model.layers.13.self_attn.v_proj.weight": "model-00019-of-00039.safetensors", "model.layers.14.input_layernorm.weight": "model-00019-of-00039.safetensors", "model.layers.14.mlp.experts.0.down_proj.weight": "model-00020-of-00039.safetensors", "model.layers.14.mlp.experts.1.down_proj.weight": "model-00020-of-00039.safetensors", "model.layers.14.mlp.experts.2.down_proj.weight": 
"model-00020-of-00039.safetensors", "model.layers.14.mlp.experts.3.down_proj.weight": "model-00020-of-00039.safetensors", "model.layers.14.mlp.shared_expert.down_proj.weight": "model-00020-of-00039.safetensors", "model.layers.14.mlp.experts.0.gate_proj.weight": "model-00020-of-00039.safetensors", "model.layers.14.mlp.experts.1.gate_proj.weight": "model-00020-of-00039.safetensors", "model.layers.14.mlp.experts.2.gate_proj.weight": "model-00020-of-00039.safetensors", "model.layers.14.mlp.experts.3.gate_proj.weight": "model-00020-of-00039.safetensors", "model.layers.14.mlp.shared_expert.gate_proj.weight": "model-00020-of-00039.safetensors", "model.layers.14.mlp.experts.0.up_proj.weight": "model-00020-of-00039.safetensors", "model.layers.14.mlp.experts.1.up_proj.weight": "model-00020-of-00039.safetensors", "model.layers.14.mlp.experts.2.up_proj.weight": "model-00020-of-00039.safetensors", "model.layers.14.mlp.experts.3.up_proj.weight": "model-00021-of-00039.safetensors", "model.layers.14.mlp.shared_expert.up_proj.weight": "model-00021-of-00039.safetensors", "model.layers.14.post_attention_layernorm.weight": "model-00021-of-00039.safetensors", "model.layers.14.self_attn.k_proj.bias": "model-00021-of-00039.safetensors", "model.layers.14.self_attn.k_proj.weight": "model-00021-of-00039.safetensors", "model.layers.14.self_attn.o_proj.weight": "model-00021-of-00039.safetensors", "model.layers.14.self_attn.q_proj.bias": "model-00021-of-00039.safetensors", "model.layers.14.self_attn.q_proj.weight": "model-00021-of-00039.safetensors", "model.layers.14.self_attn.v_proj.bias": "model-00021-of-00039.safetensors", "model.layers.14.self_attn.v_proj.weight": "model-00021-of-00039.safetensors", "model.layers.15.input_layernorm.weight": "model-00021-of-00039.safetensors", "model.layers.15.mlp.experts.0.down_proj.weight": "model-00021-of-00039.safetensors", "model.layers.15.mlp.experts.1.down_proj.weight": "model-00021-of-00039.safetensors", "model.layers.15.mlp.experts.2.down_proj.weight": "model-00021-of-00039.safetensors", "model.layers.15.mlp.experts.3.down_proj.weight": "model-00021-of-00039.safetensors", "model.layers.15.mlp.shared_expert.down_proj.weight": "model-00021-of-00039.safetensors", "model.layers.15.mlp.experts.0.gate_proj.weight": "model-00021-of-00039.safetensors", "model.layers.15.mlp.experts.1.gate_proj.weight": "model-00021-of-00039.safetensors", "model.layers.15.mlp.experts.2.gate_proj.weight": "model-00021-of-00039.safetensors", "model.layers.15.mlp.experts.3.gate_proj.weight": "model-00021-of-00039.safetensors", "model.layers.15.mlp.shared_expert.gate_proj.weight": "model-00022-of-00039.safetensors", "model.layers.15.mlp.experts.0.up_proj.weight": "model-00022-of-00039.safetensors", "model.layers.15.mlp.experts.1.up_proj.weight": "model-00022-of-00039.safetensors", "model.layers.15.mlp.experts.2.up_proj.weight": "model-00022-of-00039.safetensors", "model.layers.15.mlp.experts.3.up_proj.weight": "model-00022-of-00039.safetensors", "model.layers.15.mlp.shared_expert.up_proj.weight": "model-00022-of-00039.safetensors", "model.layers.15.post_attention_layernorm.weight": "model-00022-of-00039.safetensors", "model.layers.15.self_attn.k_proj.bias": "model-00022-of-00039.safetensors", "model.layers.15.self_attn.k_proj.weight": "model-00022-of-00039.safetensors", "model.layers.15.self_attn.o_proj.weight": "model-00022-of-00039.safetensors", "model.layers.15.self_attn.q_proj.bias": "model-00022-of-00039.safetensors", "model.layers.15.self_attn.q_proj.weight": "model-00022-of-00039.safetensors", 
"model.layers.15.self_attn.v_proj.bias": "model-00022-of-00039.safetensors", "model.layers.15.self_attn.v_proj.weight": "model-00022-of-00039.safetensors", "model.layers.16.input_layernorm.weight": "model-00022-of-00039.safetensors", "model.layers.16.mlp.experts.0.down_proj.weight": "model-00022-of-00039.safetensors", "model.layers.16.mlp.experts.1.down_proj.weight": "model-00022-of-00039.safetensors", "model.layers.16.mlp.experts.2.down_proj.weight": "model-00022-of-00039.safetensors", "model.layers.16.mlp.experts.3.down_proj.weight": "model-00022-of-00039.safetensors", "model.layers.16.mlp.shared_expert.down_proj.weight": "model-00022-of-00039.safetensors", "model.layers.16.mlp.experts.0.gate_proj.weight": "model-00023-of-00039.safetensors", "model.layers.16.mlp.experts.1.gate_proj.weight": "model-00023-of-00039.safetensors", "model.layers.16.mlp.experts.2.gate_proj.weight": "model-00023-of-00039.safetensors", "model.layers.16.mlp.experts.3.gate_proj.weight": "model-00023-of-00039.safetensors", "model.layers.16.mlp.shared_expert.gate_proj.weight": "model-00023-of-00039.safetensors", "model.layers.16.mlp.experts.0.up_proj.weight": "model-00023-of-00039.safetensors", "model.layers.16.mlp.experts.1.up_proj.weight": "model-00023-of-00039.safetensors", "model.layers.16.mlp.experts.2.up_proj.weight": "model-00023-of-00039.safetensors", "model.layers.16.mlp.experts.3.up_proj.weight": "model-00023-of-00039.safetensors", "model.layers.16.mlp.shared_expert.up_proj.weight": "model-00023-of-00039.safetensors", "model.layers.16.post_attention_layernorm.weight": "model-00023-of-00039.safetensors", "model.layers.16.self_attn.k_proj.bias": "model-00023-of-00039.safetensors", "model.layers.16.self_attn.k_proj.weight": "model-00023-of-00039.safetensors", "model.layers.16.self_attn.o_proj.weight": "model-00023-of-00039.safetensors", "model.layers.16.self_attn.q_proj.bias": "model-00023-of-00039.safetensors", "model.layers.16.self_attn.q_proj.weight": "model-00023-of-00039.safetensors", "model.layers.16.self_attn.v_proj.bias": "model-00023-of-00039.safetensors", "model.layers.16.self_attn.v_proj.weight": "model-00023-of-00039.safetensors", "model.layers.17.input_layernorm.weight": "model-00023-of-00039.safetensors", "model.layers.17.mlp.experts.0.down_proj.weight": "model-00023-of-00039.safetensors", "model.layers.17.mlp.experts.1.down_proj.weight": "model-00024-of-00039.safetensors", "model.layers.17.mlp.experts.2.down_proj.weight": "model-00024-of-00039.safetensors", "model.layers.17.mlp.experts.3.down_proj.weight": "model-00024-of-00039.safetensors", "model.layers.17.mlp.shared_expert.down_proj.weight": "model-00024-of-00039.safetensors", "model.layers.17.mlp.experts.0.gate_proj.weight": "model-00024-of-00039.safetensors", "model.layers.17.mlp.experts.1.gate_proj.weight": "model-00024-of-00039.safetensors", "model.layers.17.mlp.experts.2.gate_proj.weight": "model-00024-of-00039.safetensors", "model.layers.17.mlp.experts.3.gate_proj.weight": "model-00024-of-00039.safetensors", "model.layers.17.mlp.shared_expert.gate_proj.weight": "model-00024-of-00039.safetensors", "model.layers.17.mlp.experts.0.up_proj.weight": "model-00024-of-00039.safetensors", "model.layers.17.mlp.experts.1.up_proj.weight": "model-00024-of-00039.safetensors", "model.layers.17.mlp.experts.2.up_proj.weight": "model-00024-of-00039.safetensors", "model.layers.17.mlp.experts.3.up_proj.weight": "model-00024-of-00039.safetensors", "model.layers.17.mlp.shared_expert.up_proj.weight": "model-00025-of-00039.safetensors", 
"model.layers.17.post_attention_layernorm.weight": "model-00025-of-00039.safetensors", "model.layers.17.self_attn.k_proj.bias": "model-00025-of-00039.safetensors", "model.layers.17.self_attn.k_proj.weight": "model-00025-of-00039.safetensors", "model.layers.17.self_attn.o_proj.weight": "model-00025-of-00039.safetensors", "model.layers.17.self_attn.q_proj.bias": "model-00025-of-00039.safetensors", "model.layers.17.self_attn.q_proj.weight": "model-00025-of-00039.safetensors", "model.layers.17.self_attn.v_proj.bias": "model-00025-of-00039.safetensors", "model.layers.17.self_attn.v_proj.weight": "model-00025-of-00039.safetensors", "model.layers.18.input_layernorm.weight": "model-00025-of-00039.safetensors", "model.layers.18.mlp.experts.0.down_proj.weight": "model-00025-of-00039.safetensors", "model.layers.18.mlp.experts.1.down_proj.weight": "model-00025-of-00039.safetensors", "model.layers.18.mlp.experts.2.down_proj.weight": "model-00025-of-00039.safetensors", "model.layers.18.mlp.experts.3.down_proj.weight": "model-00025-of-00039.safetensors", "model.layers.18.mlp.shared_expert.down_proj.weight": "model-00025-of-00039.safetensors", "model.layers.18.mlp.experts.0.gate_proj.weight": "model-00025-of-00039.safetensors", "model.layers.18.mlp.experts.1.gate_proj.weight": "model-00025-of-00039.safetensors", "model.layers.18.mlp.experts.2.gate_proj.weight": "model-00025-of-00039.safetensors", "model.layers.18.mlp.experts.3.gate_proj.weight": "model-00025-of-00039.safetensors", "model.layers.18.mlp.shared_expert.gate_proj.weight": "model-00025-of-00039.safetensors", "model.layers.18.mlp.experts.0.up_proj.weight": "model-00026-of-00039.safetensors", "model.layers.18.mlp.experts.1.up_proj.weight": "model-00026-of-00039.safetensors", "model.layers.18.mlp.experts.2.up_proj.weight": "model-00026-of-00039.safetensors", "model.layers.18.mlp.experts.3.up_proj.weight": "model-00026-of-00039.safetensors", "model.layers.18.mlp.shared_expert.up_proj.weight": "model-00026-of-00039.safetensors", "model.layers.18.post_attention_layernorm.weight": "model-00026-of-00039.safetensors", "model.layers.18.self_attn.k_proj.bias": "model-00026-of-00039.safetensors", "model.layers.18.self_attn.k_proj.weight": "model-00026-of-00039.safetensors", "model.layers.18.self_attn.o_proj.weight": "model-00026-of-00039.safetensors", "model.layers.18.self_attn.q_proj.bias": "model-00026-of-00039.safetensors", "model.layers.18.self_attn.q_proj.weight": "model-00026-of-00039.safetensors", "model.layers.18.self_attn.v_proj.bias": "model-00026-of-00039.safetensors", "model.layers.18.self_attn.v_proj.weight": "model-00026-of-00039.safetensors", "model.layers.19.input_layernorm.weight": "model-00026-of-00039.safetensors", "model.layers.19.mlp.experts.0.down_proj.weight": "model-00026-of-00039.safetensors", "model.layers.19.mlp.experts.1.down_proj.weight": "model-00026-of-00039.safetensors", "model.layers.19.mlp.experts.2.down_proj.weight": "model-00026-of-00039.safetensors", "model.layers.19.mlp.experts.3.down_proj.weight": "model-00026-of-00039.safetensors", "model.layers.19.mlp.shared_expert.down_proj.weight": "model-00026-of-00039.safetensors", "model.layers.19.mlp.experts.0.gate_proj.weight": "model-00026-of-00039.safetensors", "model.layers.19.mlp.experts.1.gate_proj.weight": "model-00027-of-00039.safetensors", "model.layers.19.mlp.experts.2.gate_proj.weight": "model-00027-of-00039.safetensors", "model.layers.19.mlp.experts.3.gate_proj.weight": "model-00027-of-00039.safetensors", "model.layers.19.mlp.shared_expert.gate_proj.weight": 
"model-00027-of-00039.safetensors", "model.layers.19.mlp.experts.0.up_proj.weight": "model-00027-of-00039.safetensors", "model.layers.19.mlp.experts.1.up_proj.weight": "model-00027-of-00039.safetensors", "model.layers.19.mlp.experts.2.up_proj.weight": "model-00027-of-00039.safetensors", "model.layers.19.mlp.experts.3.up_proj.weight": "model-00027-of-00039.safetensors", "model.layers.19.mlp.shared_expert.up_proj.weight": "model-00027-of-00039.safetensors", "model.layers.19.post_attention_layernorm.weight": "model-00027-of-00039.safetensors", "model.layers.19.self_attn.k_proj.bias": "model-00027-of-00039.safetensors", "model.layers.19.self_attn.k_proj.weight": "model-00027-of-00039.safetensors", "model.layers.19.self_attn.o_proj.weight": "model-00027-of-00039.safetensors", "model.layers.19.self_attn.q_proj.bias": "model-00027-of-00039.safetensors", "model.layers.19.self_attn.q_proj.weight": "model-00027-of-00039.safetensors", "model.layers.19.self_attn.v_proj.bias": "model-00027-of-00039.safetensors", "model.layers.19.self_attn.v_proj.weight": "model-00027-of-00039.safetensors", "model.layers.20.input_layernorm.weight": "model-00027-of-00039.safetensors", "model.layers.20.mlp.experts.0.down_proj.weight": "model-00027-of-00039.safetensors", "model.layers.20.mlp.experts.1.down_proj.weight": "model-00027-of-00039.safetensors", "model.layers.20.mlp.experts.2.down_proj.weight": "model-00027-of-00039.safetensors", "model.layers.20.mlp.experts.3.down_proj.weight": "model-00027-of-00039.safetensors", "model.layers.20.mlp.shared_expert.down_proj.weight": "model-00028-of-00039.safetensors", "model.layers.20.mlp.experts.0.gate_proj.weight": "model-00028-of-00039.safetensors", "model.layers.20.mlp.experts.1.gate_proj.weight": "model-00028-of-00039.safetensors", "model.layers.20.mlp.experts.2.gate_proj.weight": "model-00028-of-00039.safetensors", "model.layers.20.mlp.experts.3.gate_proj.weight": "model-00028-of-00039.safetensors", "model.layers.20.mlp.shared_expert.gate_proj.weight": "model-00028-of-00039.safetensors", "model.layers.20.mlp.experts.0.up_proj.weight": "model-00028-of-00039.safetensors", "model.layers.20.mlp.experts.1.up_proj.weight": "model-00028-of-00039.safetensors", "model.layers.20.mlp.experts.2.up_proj.weight": "model-00028-of-00039.safetensors", "model.layers.20.mlp.experts.3.up_proj.weight": "model-00028-of-00039.safetensors", "model.layers.20.mlp.shared_expert.up_proj.weight": "model-00028-of-00039.safetensors", "model.layers.20.post_attention_layernorm.weight": "model-00028-of-00039.safetensors", "model.layers.20.self_attn.k_proj.bias": "model-00028-of-00039.safetensors", "model.layers.20.self_attn.k_proj.weight": "model-00028-of-00039.safetensors", "model.layers.20.self_attn.o_proj.weight": "model-00028-of-00039.safetensors", "model.layers.20.self_attn.q_proj.bias": "model-00028-of-00039.safetensors", "model.layers.20.self_attn.q_proj.weight": "model-00028-of-00039.safetensors", "model.layers.20.self_attn.v_proj.bias": "model-00028-of-00039.safetensors", "model.layers.20.self_attn.v_proj.weight": "model-00028-of-00039.safetensors", "model.layers.21.input_layernorm.weight": "model-00028-of-00039.safetensors", "model.layers.21.mlp.experts.0.down_proj.weight": "model-00029-of-00039.safetensors", "model.layers.21.mlp.experts.1.down_proj.weight": "model-00029-of-00039.safetensors", "model.layers.21.mlp.experts.2.down_proj.weight": "model-00029-of-00039.safetensors", "model.layers.21.mlp.experts.3.down_proj.weight": "model-00029-of-00039.safetensors", 
"model.layers.21.mlp.shared_expert.down_proj.weight": "model-00029-of-00039.safetensors", "model.layers.21.mlp.experts.0.gate_proj.weight": "model-00029-of-00039.safetensors", "model.layers.21.mlp.experts.1.gate_proj.weight": "model-00029-of-00039.safetensors", "model.layers.21.mlp.experts.2.gate_proj.weight": "model-00029-of-00039.safetensors", "model.layers.21.mlp.experts.3.gate_proj.weight": "model-00029-of-00039.safetensors", "model.layers.21.mlp.shared_expert.gate_proj.weight": "model-00029-of-00039.safetensors", "model.layers.21.mlp.experts.0.up_proj.weight": "model-00029-of-00039.safetensors", "model.layers.21.mlp.experts.1.up_proj.weight": "model-00029-of-00039.safetensors", "model.layers.21.mlp.experts.2.up_proj.weight": "model-00029-of-00039.safetensors", "model.layers.21.mlp.experts.3.up_proj.weight": "model-00030-of-00039.safetensors", "model.layers.21.mlp.shared_expert.up_proj.weight": "model-00030-of-00039.safetensors", "model.layers.21.post_attention_layernorm.weight": "model-00030-of-00039.safetensors", "model.layers.21.self_attn.k_proj.bias": "model-00030-of-00039.safetensors", "model.layers.21.self_attn.k_proj.weight": "model-00030-of-00039.safetensors", "model.layers.21.self_attn.o_proj.weight": "model-00030-of-00039.safetensors", "model.layers.21.self_attn.q_proj.bias": "model-00030-of-00039.safetensors", "model.layers.21.self_attn.q_proj.weight": "model-00030-of-00039.safetensors", "model.layers.21.self_attn.v_proj.bias": "model-00030-of-00039.safetensors", "model.layers.21.self_attn.v_proj.weight": "model-00030-of-00039.safetensors", "model.layers.22.input_layernorm.weight": "model-00030-of-00039.safetensors", "model.layers.22.mlp.experts.0.down_proj.weight": "model-00030-of-00039.safetensors", "model.layers.22.mlp.experts.1.down_proj.weight": "model-00030-of-00039.safetensors", "model.layers.22.mlp.experts.2.down_proj.weight": "model-00030-of-00039.safetensors", "model.layers.22.mlp.experts.3.down_proj.weight": "model-00030-of-00039.safetensors", "model.layers.22.mlp.shared_expert.down_proj.weight": "model-00030-of-00039.safetensors", "model.layers.22.mlp.experts.0.gate_proj.weight": "model-00030-of-00039.safetensors", "model.layers.22.mlp.experts.1.gate_proj.weight": "model-00030-of-00039.safetensors", "model.layers.22.mlp.experts.2.gate_proj.weight": "model-00030-of-00039.safetensors", "model.layers.22.mlp.experts.3.gate_proj.weight": "model-00030-of-00039.safetensors", "model.layers.22.mlp.shared_expert.gate_proj.weight": "model-00031-of-00039.safetensors", "model.layers.22.mlp.experts.0.up_proj.weight": "model-00031-of-00039.safetensors", "model.layers.22.mlp.experts.1.up_proj.weight": "model-00031-of-00039.safetensors", "model.layers.22.mlp.experts.2.up_proj.weight": "model-00031-of-00039.safetensors", "model.layers.22.mlp.experts.3.up_proj.weight": "model-00031-of-00039.safetensors", "model.layers.22.mlp.shared_expert.up_proj.weight": "model-00031-of-00039.safetensors", "model.layers.22.post_attention_layernorm.weight": "model-00031-of-00039.safetensors", "model.layers.22.self_attn.k_proj.bias": "model-00031-of-00039.safetensors", "model.layers.22.self_attn.k_proj.weight": "model-00031-of-00039.safetensors", "model.layers.22.self_attn.o_proj.weight": "model-00031-of-00039.safetensors", "model.layers.22.self_attn.q_proj.bias": "model-00031-of-00039.safetensors", "model.layers.22.self_attn.q_proj.weight": "model-00031-of-00039.safetensors", "model.layers.22.self_attn.v_proj.bias": "model-00031-of-00039.safetensors", "model.layers.22.self_attn.v_proj.weight": 
"model-00031-of-00039.safetensors", "model.layers.23.input_layernorm.weight": "model-00031-of-00039.safetensors", "model.layers.23.mlp.experts.0.down_proj.weight": "model-00031-of-00039.safetensors", "model.layers.23.mlp.experts.1.down_proj.weight": "model-00031-of-00039.safetensors", "model.layers.23.mlp.experts.2.down_proj.weight": "model-00031-of-00039.safetensors", "model.layers.23.mlp.experts.3.down_proj.weight": "model-00031-of-00039.safetensors", "model.layers.23.mlp.shared_expert.down_proj.weight": "model-00031-of-00039.safetensors", "model.layers.23.mlp.experts.0.gate_proj.weight": "model-00032-of-00039.safetensors", "model.layers.23.mlp.experts.1.gate_proj.weight": "model-00032-of-00039.safetensors", "model.layers.23.mlp.experts.2.gate_proj.weight": "model-00032-of-00039.safetensors", "model.layers.23.mlp.experts.3.gate_proj.weight": "model-00032-of-00039.safetensors", "model.layers.23.mlp.shared_expert.gate_proj.weight": "model-00032-of-00039.safetensors", "model.layers.23.mlp.experts.0.up_proj.weight": "model-00032-of-00039.safetensors", "model.layers.23.mlp.experts.1.up_proj.weight": "model-00032-of-00039.safetensors", "model.layers.23.mlp.experts.2.up_proj.weight": "model-00032-of-00039.safetensors", "model.layers.23.mlp.experts.3.up_proj.weight": "model-00032-of-00039.safetensors", "model.layers.23.mlp.shared_expert.up_proj.weight": "model-00032-of-00039.safetensors", "model.layers.23.post_attention_layernorm.weight": "model-00032-of-00039.safetensors", "model.layers.23.self_attn.k_proj.bias": "model-00032-of-00039.safetensors", "model.layers.23.self_attn.k_proj.weight": "model-00032-of-00039.safetensors", "model.layers.23.self_attn.o_proj.weight": "model-00032-of-00039.safetensors", "model.layers.23.self_attn.q_proj.bias": "model-00032-of-00039.safetensors", "model.layers.23.self_attn.q_proj.weight": "model-00032-of-00039.safetensors", "model.layers.23.self_attn.v_proj.bias": "model-00032-of-00039.safetensors", "model.layers.23.self_attn.v_proj.weight": "model-00032-of-00039.safetensors", "model.layers.24.input_layernorm.weight": "model-00032-of-00039.safetensors", "model.layers.24.mlp.experts.0.down_proj.weight": "model-00032-of-00039.safetensors", "model.layers.24.mlp.experts.1.down_proj.weight": "model-00033-of-00039.safetensors", "model.layers.24.mlp.experts.2.down_proj.weight": "model-00033-of-00039.safetensors", "model.layers.24.mlp.experts.3.down_proj.weight": "model-00033-of-00039.safetensors", "model.layers.24.mlp.shared_expert.down_proj.weight": "model-00033-of-00039.safetensors", "model.layers.24.mlp.experts.0.gate_proj.weight": "model-00033-of-00039.safetensors", "model.layers.24.mlp.experts.1.gate_proj.weight": "model-00033-of-00039.safetensors", "model.layers.24.mlp.experts.2.gate_proj.weight": "model-00033-of-00039.safetensors", "model.layers.24.mlp.experts.3.gate_proj.weight": "model-00033-of-00039.safetensors", "model.layers.24.mlp.shared_expert.gate_proj.weight": "model-00033-of-00039.safetensors", "model.layers.24.mlp.experts.0.up_proj.weight": "model-00033-of-00039.safetensors", "model.layers.24.mlp.experts.1.up_proj.weight": "model-00033-of-00039.safetensors", "model.layers.24.mlp.experts.2.up_proj.weight": "model-00033-of-00039.safetensors", "model.layers.24.mlp.experts.3.up_proj.weight": "model-00033-of-00039.safetensors", "model.layers.24.mlp.shared_expert.up_proj.weight": "model-00034-of-00039.safetensors", "model.layers.24.post_attention_layernorm.weight": "model-00034-of-00039.safetensors", "model.layers.24.self_attn.k_proj.bias": 
"model-00034-of-00039.safetensors", "model.layers.24.self_attn.k_proj.weight": "model-00034-of-00039.safetensors", "model.layers.24.self_attn.o_proj.weight": "model-00034-of-00039.safetensors", "model.layers.24.self_attn.q_proj.bias": "model-00034-of-00039.safetensors", "model.layers.24.self_attn.q_proj.weight": "model-00034-of-00039.safetensors", "model.layers.24.self_attn.v_proj.bias": "model-00034-of-00039.safetensors", "model.layers.24.self_attn.v_proj.weight": "model-00034-of-00039.safetensors", "model.layers.25.input_layernorm.weight": "model-00034-of-00039.safetensors", "model.layers.25.mlp.experts.0.down_proj.weight": "model-00034-of-00039.safetensors", "model.layers.25.mlp.experts.1.down_proj.weight": "model-00034-of-00039.safetensors", "model.layers.25.mlp.experts.2.down_proj.weight": "model-00034-of-00039.safetensors", "model.layers.25.mlp.experts.3.down_proj.weight": "model-00034-of-00039.safetensors", "model.layers.25.mlp.shared_expert.down_proj.weight": "model-00034-of-00039.safetensors", "model.layers.25.mlp.experts.0.gate_proj.weight": "model-00034-of-00039.safetensors", "model.layers.25.mlp.experts.1.gate_proj.weight": "model-00034-of-00039.safetensors", "model.layers.25.mlp.experts.2.gate_proj.weight": "model-00034-of-00039.safetensors", "model.layers.25.mlp.experts.3.gate_proj.weight": "model-00034-of-00039.safetensors", "model.layers.25.mlp.shared_expert.gate_proj.weight": "model-00034-of-00039.safetensors", "model.layers.25.mlp.experts.0.up_proj.weight": "model-00035-of-00039.safetensors", "model.layers.25.mlp.experts.1.up_proj.weight": "model-00035-of-00039.safetensors", "model.layers.25.mlp.experts.2.up_proj.weight": "model-00035-of-00039.safetensors", "model.layers.25.mlp.experts.3.up_proj.weight": "model-00035-of-00039.safetensors", "model.layers.25.mlp.shared_expert.up_proj.weight": "model-00035-of-00039.safetensors", "model.layers.25.post_attention_layernorm.weight": "model-00035-of-00039.safetensors", "model.layers.25.self_attn.k_proj.bias": "model-00035-of-00039.safetensors", "model.layers.25.self_attn.k_proj.weight": "model-00035-of-00039.safetensors", "model.layers.25.self_attn.o_proj.weight": "model-00035-of-00039.safetensors", "model.layers.25.self_attn.q_proj.bias": "model-00035-of-00039.safetensors", "model.layers.25.self_attn.q_proj.weight": "model-00035-of-00039.safetensors", "model.layers.25.self_attn.v_proj.bias": "model-00035-of-00039.safetensors", "model.layers.25.self_attn.v_proj.weight": "model-00035-of-00039.safetensors", "model.layers.26.input_layernorm.weight": "model-00035-of-00039.safetensors", "model.layers.26.mlp.experts.0.down_proj.weight": "model-00035-of-00039.safetensors", "model.layers.26.mlp.experts.1.down_proj.weight": "model-00035-of-00039.safetensors", "model.layers.26.mlp.experts.2.down_proj.weight": "model-00035-of-00039.safetensors", "model.layers.26.mlp.experts.3.down_proj.weight": "model-00035-of-00039.safetensors", "model.layers.26.mlp.shared_expert.down_proj.weight": "model-00035-of-00039.safetensors", "model.layers.26.mlp.experts.0.gate_proj.weight": "model-00035-of-00039.safetensors", "model.layers.26.mlp.experts.1.gate_proj.weight": "model-00036-of-00039.safetensors", "model.layers.26.mlp.experts.2.gate_proj.weight": "model-00036-of-00039.safetensors", "model.layers.26.mlp.experts.3.gate_proj.weight": "model-00036-of-00039.safetensors", "model.layers.26.mlp.shared_expert.gate_proj.weight": "model-00036-of-00039.safetensors", "model.layers.26.mlp.experts.0.up_proj.weight": "model-00036-of-00039.safetensors", 
"model.layers.26.mlp.experts.1.up_proj.weight": "model-00036-of-00039.safetensors", "model.layers.26.mlp.experts.2.up_proj.weight": "model-00036-of-00039.safetensors", "model.layers.26.mlp.experts.3.up_proj.weight": "model-00036-of-00039.safetensors", "model.layers.26.mlp.shared_expert.up_proj.weight": "model-00036-of-00039.safetensors", "model.layers.26.post_attention_layernorm.weight": "model-00036-of-00039.safetensors", "model.layers.26.self_attn.k_proj.bias": "model-00036-of-00039.safetensors", "model.layers.26.self_attn.k_proj.weight": "model-00036-of-00039.safetensors", "model.layers.26.self_attn.o_proj.weight": "model-00036-of-00039.safetensors", "model.layers.26.self_attn.q_proj.bias": "model-00036-of-00039.safetensors", "model.layers.26.self_attn.q_proj.weight": "model-00036-of-00039.safetensors", "model.layers.26.self_attn.v_proj.bias": "model-00036-of-00039.safetensors", "model.layers.26.self_attn.v_proj.weight": "model-00036-of-00039.safetensors", "model.layers.27.input_layernorm.weight": "model-00036-of-00039.safetensors", "model.layers.27.mlp.experts.0.down_proj.weight": "model-00036-of-00039.safetensors", "model.layers.27.mlp.experts.1.down_proj.weight": "model-00036-of-00039.safetensors", "model.layers.27.mlp.experts.2.down_proj.weight": "model-00036-of-00039.safetensors", "model.layers.27.mlp.experts.3.down_proj.weight": "model-00036-of-00039.safetensors", "model.layers.27.mlp.shared_expert.down_proj.weight": "model-00037-of-00039.safetensors", "model.layers.27.mlp.experts.0.gate_proj.weight": "model-00037-of-00039.safetensors", "model.layers.27.mlp.experts.1.gate_proj.weight": "model-00037-of-00039.safetensors", "model.layers.27.mlp.experts.2.gate_proj.weight": "model-00037-of-00039.safetensors", "model.layers.27.mlp.experts.3.gate_proj.weight": "model-00037-of-00039.safetensors", "model.layers.27.mlp.shared_expert.gate_proj.weight": "model-00037-of-00039.safetensors", "model.layers.27.mlp.experts.0.up_proj.weight": "model-00037-of-00039.safetensors", "model.layers.27.mlp.experts.1.up_proj.weight": "model-00037-of-00039.safetensors", "model.layers.27.mlp.experts.2.up_proj.weight": "model-00037-of-00039.safetensors", "model.layers.27.mlp.experts.3.up_proj.weight": "model-00037-of-00039.safetensors", "model.layers.27.mlp.shared_expert.up_proj.weight": "model-00037-of-00039.safetensors", "model.layers.27.post_attention_layernorm.weight": "model-00037-of-00039.safetensors", "model.layers.27.self_attn.k_proj.bias": "model-00037-of-00039.safetensors", "model.layers.27.self_attn.k_proj.weight": "model-00037-of-00039.safetensors", "model.layers.27.self_attn.o_proj.weight": "model-00037-of-00039.safetensors", "model.layers.27.self_attn.q_proj.bias": "model-00037-of-00039.safetensors", "model.layers.27.self_attn.q_proj.weight": "model-00037-of-00039.safetensors", "model.layers.27.self_attn.v_proj.bias": "model-00037-of-00039.safetensors", "model.layers.27.self_attn.v_proj.weight": "model-00037-of-00039.safetensors", "model.norm.weight": "model-00037-of-00039.safetensors", "lm_head.weight": "model-00038-of-00039.safetensors", "model.layers.0.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.0.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.1.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.1.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.2.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.2.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", 
"model.layers.3.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.3.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.4.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.4.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.5.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.5.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.6.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.6.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.7.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.7.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.8.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.8.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.9.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.9.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.10.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.10.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.11.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.11.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.12.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.12.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.13.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.13.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.14.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.14.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.15.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.15.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.16.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.16.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.17.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.17.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.18.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.18.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.19.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.19.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.20.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.20.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.21.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.21.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.22.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.22.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.23.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.23.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.24.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.24.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.25.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.25.mlp.shared_expert_gate.weight": 
"model-00039-of-00039.safetensors", "model.layers.26.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.26.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors", "model.layers.27.mlp.gate.weight": "model-00039-of-00039.safetensors", "model.layers.27.mlp.shared_expert_gate.weight": "model-00039-of-00039.safetensors"}}
special_tokens_map.json ADDED
@@ -0,0 +1,25 @@
+ {
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "eos_token": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": "<|im_end|>"
+ }
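
Note (not part of the repo): `special_tokens_map.json` declares the ChatML control tokens as additional special tokens and reuses `<|im_end|>` as both the end-of-sequence and padding token (there is no dedicated pad token). A minimal sketch of how this surfaces on a loaded tokenizer; the repo id is a placeholder:

```python
# Minimal sketch (not part of the repo): how the special tokens above surface
# on a loaded tokenizer. "path/or/repo-id" is a placeholder.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/or/repo-id")

print(tok.eos_token, tok.eos_token_id)  # <|im_end|>, 151645 per added_tokens.json
print(tok.pad_token)                    # <|im_end|> doubles as the padding token
print(tok.additional_special_tokens)    # <|im_start|>, <|im_end|>, <|object_ref_start|>, ...
```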
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:63a2951d5edfa5cc0a2346ef872f8c77a2920274cfc3b503b04e3799104dee80
+ size 11422060
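
Note (not part of the repo): `tokenizer.json` is committed as a Git LFS pointer (spec v1), so only the oid and size live in git history; the ~11 MB file is fetched separately. A minimal sketch of checking a downloaded copy against the pointer above:

```python
# Minimal sketch (not part of the repo): verify a downloaded tokenizer.json
# against the LFS pointer above (sha256 oid and byte size).
import hashlib
from pathlib import Path

data = Path("tokenizer.json").read_bytes()

assert len(data) == 11422060, "size mismatch with LFS pointer"
assert hashlib.sha256(data).hexdigest() == (
    "63a2951d5edfa5cc0a2346ef872f8c77a2920274cfc3b503b04e3799104dee80"
), "sha256 mismatch with LFS pointer"
print("tokenizer.json matches the LFS pointer")
```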
tokenizer_config.json ADDED
@@ -0,0 +1,211 @@
+ {
+ "add_bos_token": false,
+ "add_prefix_space": false,
+ "added_tokens_decoder": {
+ "151643": {
+ "content": "<|endoftext|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151644": {
+ "content": "<|im_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151645": {
+ "content": "<|im_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151646": {
+ "content": "<|object_ref_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151647": {
+ "content": "<|object_ref_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151648": {
+ "content": "<|box_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151649": {
+ "content": "<|box_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151650": {
+ "content": "<|quad_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151651": {
+ "content": "<|quad_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151652": {
+ "content": "<|vision_start|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151653": {
+ "content": "<|vision_end|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151654": {
+ "content": "<|vision_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151655": {
+ "content": "<|image_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151656": {
+ "content": "<|video_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "151657": {
+ "content": "<tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151658": {
+ "content": "</tool_call>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151659": {
+ "content": "<|fim_prefix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151660": {
+ "content": "<|fim_middle|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151661": {
+ "content": "<|fim_suffix|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151662": {
+ "content": "<|fim_pad|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151663": {
+ "content": "<|repo_name|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ },
+ "151664": {
+ "content": "<|file_sep|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": false
+ }
+ },
+ "additional_special_tokens": [
+ "<|im_start|>",
+ "<|im_end|>",
+ "<|object_ref_start|>",
+ "<|object_ref_end|>",
+ "<|box_start|>",
+ "<|box_end|>",
+ "<|quad_start|>",
+ "<|quad_end|>",
+ "<|vision_start|>",
+ "<|vision_end|>",
+ "<|vision_pad|>",
+ "<|image_pad|>",
+ "<|video_pad|>"
+ ],
+ "bos_token": null,
+ "chat_template": "{%- if tools %}\n {{- '<|im_start|>system\\n' }}\n {%- if messages[0]['role'] == 'system' %}\n {{- messages[0]['content'] }}\n {%- else %}\n {{- 'You are Qwen, created by Alibaba Cloud. You are a helpful assistant.' }}\n {%- endif %}\n {{- \"\\n\\n# Tools\\n\\nYou may call one or more functions to assist with the user query.\\n\\nYou are provided with function signatures within <tools></tools> XML tags:\\n<tools>\" }}\n {%- for tool in tools %}\n {{- \"\\n\" }}\n {{- tool | tojson }}\n {%- endfor %}\n {{- \"\\n</tools>\\n\\nFor each function call, return a json object with function name and arguments within <tool_call></tool_call> XML tags:\\n<tool_call>\\n{{\\\"name\\\": <function-name>, \\\"arguments\\\": <args-json-object>}}\\n</tool_call><|im_end|>\\n\" }}\n{%- else %}\n {%- if messages[0]['role'] == 'system' %}\n {{- '<|im_start|>system\\n' + messages[0]['content'] + '<|im_end|>\\n' }}\n {%- else %}\n {{- '<|im_start|>system\\nYou are Qwen, created by Alibaba Cloud. You are a helpful assistant.<|im_end|>\\n' }}\n {%- endif %}\n{%- endif %}\n{%- for message in messages %}\n {%- if (message.role == \"user\") or (message.role == \"system\" and not loop.first) or (message.role == \"assistant\" and not message.tool_calls) %}\n {{- '<|im_start|>' + message.role + '\\n' + message.content + '<|im_end|>' + '\\n' }}\n {%- elif message.role == \"assistant\" %}\n {{- '<|im_start|>' + message.role }}\n {%- if message.content %}\n {{- '\\n' + message.content }}\n {%- endif %}\n {%- for tool_call in message.tool_calls %}\n {%- if tool_call.function is defined %}\n {%- set tool_call = tool_call.function %}\n {%- endif %}\n {{- '\\n<tool_call>\\n{\"name\": \"' }}\n {{- tool_call.name }}\n {{- '\", \"arguments\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- '}\\n</tool_call>' }}\n {%- endfor %}\n {{- '<|im_end|>\\n' }}\n {%- elif message.role == \"tool\" %}\n {%- if (loop.index0 == 0) or (messages[loop.index0 - 1].role != \"tool\") %}\n {{- '<|im_start|>user' }}\n {%- endif %}\n {{- '\\n<tool_response>\\n' }}\n {{- message.content }}\n {{- '\\n</tool_response>' }}\n {%- if loop.last or (messages[loop.index0 + 1].role != \"tool\") %}\n {{- '<|im_end|>\\n' }}\n {%- endif %}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|im_start|>assistant\\n' }}\n{%- endif %}\n",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "<|im_end|>",
+ "errors": "replace",
+ "max_length": null,
+ "model_max_length": 131072,
+ "pad_to_multiple_of": null,
+ "pad_token": "<|im_end|>",
+ "pad_token_type_id": 0,
+ "padding_side": "left",
+ "split_special_tokens": false,
+ "tokenizer_class": "Qwen2Tokenizer",
+ "unk_token": null
+ }
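
Note (not part of the repo): `tokenizer_config.json` carries the ChatML-style `chat_template` (with tool-calling support) plus the padding/eos settings used at inference time. A minimal sketch of rendering a prompt through that template; the repo id is a placeholder:

```python
# Minimal sketch (not part of the repo): render a prompt with the ChatML
# chat_template defined above. "path/or/repo-id" is a placeholder.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("path/or/repo-id")

messages = [
    {"role": "system", "content": "You are a helpful assistant."},
    {"role": "user", "content": "Explain mixture-of-experts routing in one sentence."},
]

prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# <|im_start|>system\n...<|im_end|>\n<|im_start|>user\n...<|im_end|>\n<|im_start|>assistant\n
```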
vocab.json ADDED
The diff for this file is too large to render. See raw diff