SFconvertbot committed
Commit 3d1a8e2
1 Parent(s): ac0fefb

Adding `safetensors` variant of this model


This is an automated PR created with https://huggingface.co/spaces/safetensors/convert

These new files are equivalent to `pytorch_model.bin` but safe in the sense that
no arbitrary code can be put into them.
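If you want to try the converted weights before this PR is merged, here is a minimal sketch (not produced by the conversion bot, and assuming this is a causal-LM checkpoint); the repo id and PR ref below are placeholders:

```python
# Minimal sketch: load the safetensors variant with transformers.
# "your-org/your-model" and "refs/pr/1" are placeholders (assumptions);
# substitute this repository's id and the ref of this PR.
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained(
    "your-org/your-model",   # placeholder repo id
    revision="refs/pr/1",    # placeholder PR ref; drop once the PR is merged
    use_safetensors=True,    # load model-*.safetensors instead of pytorch_model.bin
)
```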

These files also happen to load much faster than their PyTorch counterparts:
https://colab.research.google.com/github/huggingface/notebooks/blob/main/safetensors_doc/en/speed.ipynb
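A rough way to check this locally is to time one load of each format (a sketch, assuming both files are already downloaded; the file names are placeholders):

```python
# Illustrative timing of a safetensors shard vs. a pickled PyTorch checkpoint.
# Both paths are placeholders for files already on local disk; compare
# like-for-like shards for a fair number.
import time

import torch
from safetensors.torch import load_file

start = time.perf_counter()
state_dict_st = load_file("model-00001-of-00039.safetensors", device="cpu")
print(f"safetensors: {time.perf_counter() - start:.2f}s")

start = time.perf_counter()
state_dict_pt = torch.load("pytorch_model.bin", map_location="cpu", weights_only=True)
print(f"torch.load : {time.perf_counter() - start:.2f}s")
```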

The widgets on your model page will run using this model even before this PR is merged,
making sure the files actually work.

If you find any issues, please report them here: https://huggingface.co/spaces/safetensors/convert/discussions

Feel free to ignore this PR.

Files changed (40)
  1. model-00001-of-00039.safetensors +3 -0
  2. model-00002-of-00039.safetensors +3 -0
  3. model-00003-of-00039.safetensors +3 -0
  4. model-00004-of-00039.safetensors +3 -0
  5. model-00005-of-00039.safetensors +3 -0
  6. model-00006-of-00039.safetensors +3 -0
  7. model-00007-of-00039.safetensors +3 -0
  8. model-00008-of-00039.safetensors +3 -0
  9. model-00009-of-00039.safetensors +3 -0
  10. model-00010-of-00039.safetensors +3 -0
  11. model-00011-of-00039.safetensors +3 -0
  12. model-00012-of-00039.safetensors +3 -0
  13. model-00013-of-00039.safetensors +3 -0
  14. model-00014-of-00039.safetensors +3 -0
  15. model-00015-of-00039.safetensors +3 -0
  16. model-00016-of-00039.safetensors +3 -0
  17. model-00017-of-00039.safetensors +3 -0
  18. model-00018-of-00039.safetensors +3 -0
  19. model-00019-of-00039.safetensors +3 -0
  20. model-00020-of-00039.safetensors +3 -0
  21. model-00021-of-00039.safetensors +3 -0
  22. model-00022-of-00039.safetensors +3 -0
  23. model-00023-of-00039.safetensors +3 -0
  24. model-00024-of-00039.safetensors +3 -0
  25. model-00025-of-00039.safetensors +3 -0
  26. model-00026-of-00039.safetensors +3 -0
  27. model-00027-of-00039.safetensors +3 -0
  28. model-00028-of-00039.safetensors +3 -0
  29. model-00029-of-00039.safetensors +3 -0
  30. model-00030-of-00039.safetensors +3 -0
  31. model-00031-of-00039.safetensors +3 -0
  32. model-00032-of-00039.safetensors +3 -0
  33. model-00033-of-00039.safetensors +3 -0
  34. model-00034-of-00039.safetensors +3 -0
  35. model-00035-of-00039.safetensors +3 -0
  36. model-00036-of-00039.safetensors +3 -0
  37. model-00037-of-00039.safetensors +3 -0
  38. model-00038-of-00039.safetensors +3 -0
  39. model-00039-of-00039.safetensors +3 -0
  40. model.safetensors.index.json +330 -0
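Each diff below is a Git LFS pointer (a `version` line, the shard's `oid sha256:...`, and its `size` in bytes) rather than the tensor data itself. After downloading a shard you can check it against its pointer; a small sketch (the local path is a placeholder, the hash and size are copied from the first pointer below):

```python
# Sketch: verify a downloaded shard against the sha256 and byte size recorded
# in its Git LFS pointer. The local file path is a placeholder.
import hashlib
import os


def verify_shard(path: str, expected_sha256: str, expected_size: int) -> bool:
    """Return True if the file's size and sha256 digest match the LFS pointer."""
    if os.path.getsize(path) != expected_size:
        return False
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            digest.update(chunk)
    return digest.hexdigest() == expected_sha256


print(verify_shard(
    "model-00001-of-00039.safetensors",
    "6f25194235dc43e93e0ee9e056af3171a5b4fd279a084c827228b49df6e22307",
    396362680,
))
```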
model-00001-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6f25194235dc43e93e0ee9e056af3171a5b4fd279a084c827228b49df6e22307
+ size 396362680
model-00002-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4a1b24d9a93b2bd05585985102c9d87133247e9da98006f4d3c8e6c8733fee49
+ size 371213216
model-00003-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b883d21c9886029bcbd83d864ac0fa8a9e57c5ceeec1136e075c2c5132576c9f
+ size 371213568
model-00004-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ec6fadcac99e18c88df5a9889dd02ff1799ba83a931c24e3c16b381052994ab4
+ size 371213568
model-00005-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:01d6229332e74b071b8dcfb44bd0ac3a6ac736b14313d3e9b38f4151b6e88a39
+ size 371213568
model-00006-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d6283779430d1a3048aaa62bcc01836a8b8eab40de1ea10d96276879a4089909
+ size 314573872
model-00007-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d03584c23a342a31aa9961ca97315e9a02fa459c8160ec89336be23fb4cdd32a
+ size 314590464
model-00008-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7eb8d0d466154869a0eaf55e264765dfa2f3ffa7db0081acd0b02d3d2e11b702
+ size 314590464
model-00009-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3c15bbd7bed37ce9586640f29255f3fcc458933d9f11d07ff4caa0a621ee7c92
+ size 371213216
model-00010-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4c4a2d850f3fb754120e721eb89e12a0f684fbd636631d1e43513c530f105de8
+ size 371213568
model-00011-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2fde956202548f512991889dd09cdd9c3012adfb06bd948e024349713b5ba8f2
+ size 371213568
model-00012-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c6682e13b5aaa199225bcc8e61a6ff68964a37fcbacfad07a5aa93dea9222f41
+ size 371213568
model-00013-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8b6ce92aed14b0f2b6f1389023b849a95a08cb2597c5293d4581b9442e517adb
+ size 314573880
model-00014-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:14640cbec2cb9129c7b82b86d5e88f54d84bf14df7daa960b23cf438619a9969
+ size 314590472
model-00015-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bfc0292c3cb2173137e18240eb1f04c78e3ee99ef7b81b8ce776f0198a405bf8
+ size 314590480
model-00016-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fbcbfff6b7eb210856fc88522f866a4e7f70978ca0e778ddd3209e9aebfe1cb0
+ size 371213224
model-00017-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:30af9ff286e60d3b4d0696e2e334a40042e7cbd892c4b1671d8e6598e450fc67
+ size 371213576
model-00018-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1c782ad003707d2220d7eff4be14b9a2716b98faab815c83e4170e74a644b895
+ size 371213576
model-00019-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b1f4daa0997f861b713546ec98319d949ba77b805f88d925cb916224246777e0
+ size 371213576
model-00020-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7c2009e827ab513262ea732a57db3cc1d721ef7a0dde106a2580b0196b600a71
+ size 314573880
model-00021-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:939d8e76565b8bb0387ea72f7bcc44591ba25da85f04f89c9ebaaf98dfb756c9
+ size 314590472
model-00022-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5fa4deae02f5dae36264d4cea73b866817e3ed7f2f46e39e5c2bd35a0a0b3dab
+ size 314590480
model-00023-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:30ca4cee4bd2ee761ee7162b2f6b95d19aeb3dc943fabc26e6976154b67782a2
+ size 371213224
model-00024-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:051f51d1c407fd2404d102a1ce631438dbb0d622ab0d25cb2c2e7cba575aae61
+ size 371213576
model-00025-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:482e38fb9bb938820bbe4bf94880497475884a3d3bd300c170bab7d395b5d179
+ size 371213576
model-00026-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:07af5dbbb258fc50f66359e1366069c653fe1039c5442fc3590e995711941269
+ size 371213576
model-00027-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7a3f589dc53b7380b9d093e3f0b8f2e783f8c182b74f992daa04098591f5b6d9
+ size 314573880
model-00028-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:86fed18094c9b24de4e39a652ec756a2beab34da04a97a029d736311577a5b6a
+ size 314590472
model-00029-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9c53e03378e973e0f42f1d056c66c1bbec095e8e58b19f5ec6b9f0cc53ad1818
+ size 314590480
model-00030-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f0a6144b6c2256993948b312f1d154a4afc01eec55d082f08c46a3a049ba98ee
+ size 371213224
model-00031-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:901a2dd6d751bb0906692573efc30cf5235b52f4d401dae2eb4704750e91da4e
+ size 371213576
model-00032-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f9c9acdbdf705e5bc6cf900430d1965ce9520b0c150e976e43b8f7700ef85dc0
+ size 371213576
model-00033-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:373d99edb982e99614f3b7460be26ee7e633a0ac25f9e7ab7da3fe8723667837
+ size 371213576
model-00034-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:461a3a79d86b1144130f691677d8088e46e307b345e4e3eea71566b0c880b23e
+ size 314573880
model-00035-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c53e5ca1b324908007d704812c0a6780b882b3d9452b62979b9310c958d69bf4
+ size 314590472
model-00036-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fd30bcdaa6712034b2f530fc695be68ef200201ca75f4b3858e4c8b2790e256b
+ size 314590480
model-00037-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c0db1b408d4f2779ce946579cd0eaa59be079b0cc39df770bda164449440be63
+ size 371213224
model-00038-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e1f90e34067228916ca2fb343afec931b812d11d8eeb64f35654aaaaba01ceaf
+ size 304112768
model-00039-of-00039.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f81ca1f06a26d4059a6a089b59eed60a628144ba94062da035a2648bdf77ed38
+ size 262144128
model.safetensors.index.json ADDED
@@ -0,0 +1,330 @@
+ {
+ "metadata": {
+ "total_size": 13476839424
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00039-of-00039.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00039.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00002-of-00039.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00002-of-00039.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00002-of-00039.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00002-of-00039.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00002-of-00039.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00039.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00039.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00039.safetensors",
+ "model.layers.0.self_attn.rotary_emb.inv_freq": "model-00001-of-00039.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00039.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00003-of-00039.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00003-of-00039.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00003-of-00039.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00003-of-00039.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00003-of-00039.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00002-of-00039.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00003-of-00039.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00002-of-00039.safetensors",
+ "model.layers.1.self_attn.rotary_emb.inv_freq": "model-00003-of-00039.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00002-of-00039.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00014-of-00039.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00013-of-00039.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00013-of-00039.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00014-of-00039.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00014-of-00039.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00013-of-00039.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00013-of-00039.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00013-of-00039.safetensors",
+ "model.layers.10.self_attn.rotary_emb.inv_freq": "model-00013-of-00039.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00013-of-00039.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00015-of-00039.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00015-of-00039.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00014-of-00039.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00015-of-00039.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00015-of-00039.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00014-of-00039.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00014-of-00039.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00014-of-00039.safetensors",
+ "model.layers.11.self_attn.rotary_emb.inv_freq": "model-00014-of-00039.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00014-of-00039.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00016-of-00039.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00016-of-00039.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00016-of-00039.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00016-of-00039.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00016-of-00039.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00015-of-00039.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00015-of-00039.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00015-of-00039.safetensors",
+ "model.layers.12.self_attn.rotary_emb.inv_freq": "model-00015-of-00039.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00015-of-00039.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00017-of-00039.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00017-of-00039.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00017-of-00039.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00017-of-00039.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00017-of-00039.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00016-of-00039.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00017-of-00039.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00016-of-00039.safetensors",
+ "model.layers.13.self_attn.rotary_emb.inv_freq": "model-00017-of-00039.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00016-of-00039.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00018-of-00039.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00018-of-00039.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00018-of-00039.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00018-of-00039.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00018-of-00039.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00017-of-00039.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00018-of-00039.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00017-of-00039.safetensors",
+ "model.layers.14.self_attn.rotary_emb.inv_freq": "model-00018-of-00039.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00018-of-00039.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00019-of-00039.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00019-of-00039.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00019-of-00039.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00019-of-00039.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00019-of-00039.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00019-of-00039.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00019-of-00039.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00018-of-00039.safetensors",
+ "model.layers.15.self_attn.rotary_emb.inv_freq": "model-00019-of-00039.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00019-of-00039.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00021-of-00039.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00020-of-00039.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00020-of-00039.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00021-of-00039.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00021-of-00039.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00020-of-00039.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00020-of-00039.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00020-of-00039.safetensors",
+ "model.layers.16.self_attn.rotary_emb.inv_freq": "model-00020-of-00039.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00020-of-00039.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00022-of-00039.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00022-of-00039.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00021-of-00039.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00022-of-00039.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00022-of-00039.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00021-of-00039.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00021-of-00039.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00021-of-00039.safetensors",
+ "model.layers.17.self_attn.rotary_emb.inv_freq": "model-00021-of-00039.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00021-of-00039.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00023-of-00039.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00023-of-00039.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00023-of-00039.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00023-of-00039.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00023-of-00039.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00022-of-00039.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00022-of-00039.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00022-of-00039.safetensors",
+ "model.layers.18.self_attn.rotary_emb.inv_freq": "model-00022-of-00039.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00022-of-00039.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00024-of-00039.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00024-of-00039.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00024-of-00039.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00024-of-00039.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00024-of-00039.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00023-of-00039.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00024-of-00039.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00023-of-00039.safetensors",
+ "model.layers.19.self_attn.rotary_emb.inv_freq": "model-00024-of-00039.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00023-of-00039.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00004-of-00039.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00004-of-00039.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00004-of-00039.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00004-of-00039.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00004-of-00039.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00003-of-00039.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00004-of-00039.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00003-of-00039.safetensors",
+ "model.layers.2.self_attn.rotary_emb.inv_freq": "model-00004-of-00039.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00004-of-00039.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00025-of-00039.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00025-of-00039.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00025-of-00039.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00025-of-00039.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00025-of-00039.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00024-of-00039.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00025-of-00039.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00024-of-00039.safetensors",
+ "model.layers.20.self_attn.rotary_emb.inv_freq": "model-00025-of-00039.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00025-of-00039.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00026-of-00039.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00026-of-00039.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00026-of-00039.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00026-of-00039.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00026-of-00039.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00026-of-00039.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00026-of-00039.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00025-of-00039.safetensors",
+ "model.layers.21.self_attn.rotary_emb.inv_freq": "model-00026-of-00039.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00026-of-00039.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00028-of-00039.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00027-of-00039.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00027-of-00039.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00028-of-00039.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00028-of-00039.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00027-of-00039.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00027-of-00039.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00027-of-00039.safetensors",
+ "model.layers.22.self_attn.rotary_emb.inv_freq": "model-00027-of-00039.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00027-of-00039.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00029-of-00039.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00029-of-00039.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00028-of-00039.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00029-of-00039.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00029-of-00039.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00028-of-00039.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00028-of-00039.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00028-of-00039.safetensors",
+ "model.layers.23.self_attn.rotary_emb.inv_freq": "model-00028-of-00039.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00028-of-00039.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00030-of-00039.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00030-of-00039.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00030-of-00039.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00030-of-00039.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00030-of-00039.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00029-of-00039.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00029-of-00039.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00029-of-00039.safetensors",
+ "model.layers.24.self_attn.rotary_emb.inv_freq": "model-00029-of-00039.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00029-of-00039.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00031-of-00039.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00031-of-00039.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00031-of-00039.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00031-of-00039.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00031-of-00039.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00030-of-00039.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00031-of-00039.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00030-of-00039.safetensors",
+ "model.layers.25.self_attn.rotary_emb.inv_freq": "model-00031-of-00039.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00030-of-00039.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00032-of-00039.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00032-of-00039.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00032-of-00039.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00032-of-00039.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00032-of-00039.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00031-of-00039.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00032-of-00039.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00031-of-00039.safetensors",
+ "model.layers.26.self_attn.rotary_emb.inv_freq": "model-00032-of-00039.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00032-of-00039.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00033-of-00039.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00033-of-00039.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00033-of-00039.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00033-of-00039.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00033-of-00039.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00033-of-00039.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00033-of-00039.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00032-of-00039.safetensors",
+ "model.layers.27.self_attn.rotary_emb.inv_freq": "model-00033-of-00039.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00033-of-00039.safetensors",
+ "model.layers.28.input_layernorm.weight": "model-00035-of-00039.safetensors",
+ "model.layers.28.mlp.down_proj.weight": "model-00034-of-00039.safetensors",
+ "model.layers.28.mlp.gate_proj.weight": "model-00034-of-00039.safetensors",
+ "model.layers.28.mlp.up_proj.weight": "model-00035-of-00039.safetensors",
+ "model.layers.28.post_attention_layernorm.weight": "model-00035-of-00039.safetensors",
+ "model.layers.28.self_attn.k_proj.weight": "model-00034-of-00039.safetensors",
+ "model.layers.28.self_attn.o_proj.weight": "model-00034-of-00039.safetensors",
+ "model.layers.28.self_attn.q_proj.weight": "model-00034-of-00039.safetensors",
+ "model.layers.28.self_attn.rotary_emb.inv_freq": "model-00034-of-00039.safetensors",
+ "model.layers.28.self_attn.v_proj.weight": "model-00034-of-00039.safetensors",
+ "model.layers.29.input_layernorm.weight": "model-00036-of-00039.safetensors",
+ "model.layers.29.mlp.down_proj.weight": "model-00036-of-00039.safetensors",
+ "model.layers.29.mlp.gate_proj.weight": "model-00035-of-00039.safetensors",
+ "model.layers.29.mlp.up_proj.weight": "model-00036-of-00039.safetensors",
+ "model.layers.29.post_attention_layernorm.weight": "model-00036-of-00039.safetensors",
+ "model.layers.29.self_attn.k_proj.weight": "model-00035-of-00039.safetensors",
+ "model.layers.29.self_attn.o_proj.weight": "model-00035-of-00039.safetensors",
+ "model.layers.29.self_attn.q_proj.weight": "model-00035-of-00039.safetensors",
+ "model.layers.29.self_attn.rotary_emb.inv_freq": "model-00035-of-00039.safetensors",
+ "model.layers.29.self_attn.v_proj.weight": "model-00035-of-00039.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00005-of-00039.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00005-of-00039.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00005-of-00039.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00005-of-00039.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00005-of-00039.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00005-of-00039.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00005-of-00039.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00004-of-00039.safetensors",
+ "model.layers.3.self_attn.rotary_emb.inv_freq": "model-00005-of-00039.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00005-of-00039.safetensors",
+ "model.layers.30.input_layernorm.weight": "model-00037-of-00039.safetensors",
+ "model.layers.30.mlp.down_proj.weight": "model-00037-of-00039.safetensors",
+ "model.layers.30.mlp.gate_proj.weight": "model-00037-of-00039.safetensors",
+ "model.layers.30.mlp.up_proj.weight": "model-00037-of-00039.safetensors",
+ "model.layers.30.post_attention_layernorm.weight": "model-00037-of-00039.safetensors",
+ "model.layers.30.self_attn.k_proj.weight": "model-00036-of-00039.safetensors",
+ "model.layers.30.self_attn.o_proj.weight": "model-00036-of-00039.safetensors",
+ "model.layers.30.self_attn.q_proj.weight": "model-00036-of-00039.safetensors",
+ "model.layers.30.self_attn.rotary_emb.inv_freq": "model-00036-of-00039.safetensors",
+ "model.layers.30.self_attn.v_proj.weight": "model-00036-of-00039.safetensors",
+ "model.layers.31.input_layernorm.weight": "model-00038-of-00039.safetensors",
+ "model.layers.31.mlp.down_proj.weight": "model-00038-of-00039.safetensors",
+ "model.layers.31.mlp.gate_proj.weight": "model-00038-of-00039.safetensors",
+ "model.layers.31.mlp.up_proj.weight": "model-00038-of-00039.safetensors",
+ "model.layers.31.post_attention_layernorm.weight": "model-00038-of-00039.safetensors",
+ "model.layers.31.self_attn.k_proj.weight": "model-00037-of-00039.safetensors",
+ "model.layers.31.self_attn.o_proj.weight": "model-00038-of-00039.safetensors",
+ "model.layers.31.self_attn.q_proj.weight": "model-00037-of-00039.safetensors",
+ "model.layers.31.self_attn.rotary_emb.inv_freq": "model-00038-of-00039.safetensors",
+ "model.layers.31.self_attn.v_proj.weight": "model-00037-of-00039.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00007-of-00039.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00006-of-00039.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00006-of-00039.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00007-of-00039.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00007-of-00039.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00006-of-00039.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00006-of-00039.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00006-of-00039.safetensors",
+ "model.layers.4.self_attn.rotary_emb.inv_freq": "model-00006-of-00039.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00006-of-00039.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00008-of-00039.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00008-of-00039.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00007-of-00039.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00008-of-00039.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00008-of-00039.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00007-of-00039.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00007-of-00039.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00007-of-00039.safetensors",
+ "model.layers.5.self_attn.rotary_emb.inv_freq": "model-00007-of-00039.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00007-of-00039.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00009-of-00039.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00009-of-00039.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00009-of-00039.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00009-of-00039.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00009-of-00039.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00008-of-00039.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00008-of-00039.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00008-of-00039.safetensors",
+ "model.layers.6.self_attn.rotary_emb.inv_freq": "model-00008-of-00039.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00008-of-00039.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00010-of-00039.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00010-of-00039.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00010-of-00039.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00010-of-00039.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00010-of-00039.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00009-of-00039.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00010-of-00039.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00009-of-00039.safetensors",
+ "model.layers.7.self_attn.rotary_emb.inv_freq": "model-00010-of-00039.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00009-of-00039.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00011-of-00039.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00011-of-00039.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00011-of-00039.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00011-of-00039.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00011-of-00039.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00010-of-00039.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00011-of-00039.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00010-of-00039.safetensors",
+ "model.layers.8.self_attn.rotary_emb.inv_freq": "model-00011-of-00039.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00011-of-00039.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00012-of-00039.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00012-of-00039.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00012-of-00039.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00012-of-00039.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00012-of-00039.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00012-of-00039.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00012-of-00039.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00011-of-00039.safetensors",
+ "model.layers.9.self_attn.rotary_emb.inv_freq": "model-00012-of-00039.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00012-of-00039.safetensors",
+ "model.norm.weight": "model-00038-of-00039.safetensors"
+ }
+ }
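The `model.safetensors.index.json` added above is what loaders use to resolve a parameter name to the shard that stores it. A minimal sketch of that lookup (assuming the index and shards are in a local directory):

```python
# Sketch: use the weight_map from model.safetensors.index.json to locate and
# load a single tensor without opening every shard. Paths are placeholders.
import json
import os

from safetensors import safe_open


def load_single_tensor(model_dir: str, name: str):
    with open(os.path.join(model_dir, "model.safetensors.index.json")) as f:
        index = json.load(f)
    shard = index["weight_map"][name]  # e.g. "model-00039-of-00039.safetensors" for "lm_head.weight"
    with safe_open(os.path.join(model_dir, shard), framework="pt", device="cpu") as st:
        return st.get_tensor(name)


# Example: tensor = load_single_tensor(".", "lm_head.weight")
```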