Diogo-V committed on
Commit
be27136
1 Parent(s): 72f9243

Upload learned parameters for llama3 in 8-bit

This view is limited to 50 files because the commit contains too many changes.
Files changed (50)
  1. params/llama3/8/norm_nlr/comb_1/init/lm_head/_s.pt +3 -0
  2. params/llama3/8/norm_nlr/comb_1/init/model.layers.0.mlp.down_proj/_s.pt +3 -0
  3. params/llama3/8/norm_nlr/comb_1/init/model.layers.0.mlp.gate_proj/_s.pt +3 -0
  4. params/llama3/8/norm_nlr/comb_1/init/model.layers.0.mlp.up_proj/_s.pt +3 -0
  5. params/llama3/8/norm_nlr/comb_1/init/model.layers.0.self_attn.k_proj/_s.pt +3 -0
  6. params/llama3/8/norm_nlr/comb_1/init/model.layers.0.self_attn.o_proj/_s.pt +3 -0
  7. params/llama3/8/norm_nlr/comb_1/init/model.layers.0.self_attn.q_proj/_s.pt +3 -0
  8. params/llama3/8/norm_nlr/comb_1/init/model.layers.0.self_attn.v_proj/_s.pt +3 -0
  9. params/llama3/8/norm_nlr/comb_1/init/model.layers.1.mlp.down_proj/_s.pt +3 -0
  10. params/llama3/8/norm_nlr/comb_1/init/model.layers.1.mlp.gate_proj/_s.pt +3 -0
  11. params/llama3/8/norm_nlr/comb_1/init/model.layers.1.mlp.up_proj/_s.pt +3 -0
  12. params/llama3/8/norm_nlr/comb_1/init/model.layers.1.self_attn.k_proj/_s.pt +3 -0
  13. params/llama3/8/norm_nlr/comb_1/init/model.layers.1.self_attn.o_proj/_s.pt +3 -0
  14. params/llama3/8/norm_nlr/comb_1/init/model.layers.1.self_attn.q_proj/_s.pt +3 -0
  15. params/llama3/8/norm_nlr/comb_1/init/model.layers.1.self_attn.v_proj/_s.pt +3 -0
  16. params/llama3/8/norm_nlr/comb_1/init/model.layers.10.mlp.down_proj/_s.pt +3 -0
  17. params/llama3/8/norm_nlr/comb_1/init/model.layers.10.mlp.gate_proj/_s.pt +3 -0
  18. params/llama3/8/norm_nlr/comb_1/init/model.layers.10.mlp.up_proj/_s.pt +3 -0
  19. params/llama3/8/norm_nlr/comb_1/init/model.layers.10.self_attn.k_proj/_s.pt +3 -0
  20. params/llama3/8/norm_nlr/comb_1/init/model.layers.10.self_attn.o_proj/_s.pt +3 -0
  21. params/llama3/8/norm_nlr/comb_1/init/model.layers.10.self_attn.q_proj/_s.pt +3 -0
  22. params/llama3/8/norm_nlr/comb_1/init/model.layers.10.self_attn.v_proj/_s.pt +3 -0
  23. params/llama3/8/norm_nlr/comb_1/init/model.layers.11.mlp.down_proj/_s.pt +3 -0
  24. params/llama3/8/norm_nlr/comb_1/init/model.layers.11.mlp.gate_proj/_s.pt +3 -0
  25. params/llama3/8/norm_nlr/comb_1/init/model.layers.11.mlp.up_proj/_s.pt +3 -0
  26. params/llama3/8/norm_nlr/comb_1/init/model.layers.11.self_attn.k_proj/_s.pt +3 -0
  27. params/llama3/8/norm_nlr/comb_1/init/model.layers.11.self_attn.o_proj/_s.pt +3 -0
  28. params/llama3/8/norm_nlr/comb_1/init/model.layers.11.self_attn.q_proj/_s.pt +3 -0
  29. params/llama3/8/norm_nlr/comb_1/init/model.layers.11.self_attn.v_proj/_s.pt +3 -0
  30. params/llama3/8/norm_nlr/comb_1/init/model.layers.12.mlp.down_proj/_s.pt +3 -0
  31. params/llama3/8/norm_nlr/comb_1/init/model.layers.12.mlp.gate_proj/_s.pt +3 -0
  32. params/llama3/8/norm_nlr/comb_1/init/model.layers.12.mlp.up_proj/_s.pt +3 -0
  33. params/llama3/8/norm_nlr/comb_1/init/model.layers.12.self_attn.k_proj/_s.pt +3 -0
  34. params/llama3/8/norm_nlr/comb_1/init/model.layers.12.self_attn.o_proj/_s.pt +3 -0
  35. params/llama3/8/norm_nlr/comb_1/init/model.layers.12.self_attn.q_proj/_s.pt +3 -0
  36. params/llama3/8/norm_nlr/comb_1/init/model.layers.12.self_attn.v_proj/_s.pt +3 -0
  37. params/llama3/8/norm_nlr/comb_1/init/model.layers.13.mlp.down_proj/_s.pt +3 -0
  38. params/llama3/8/norm_nlr/comb_1/init/model.layers.13.mlp.gate_proj/_s.pt +3 -0
  39. params/llama3/8/norm_nlr/comb_1/init/model.layers.13.mlp.up_proj/_s.pt +3 -0
  40. params/llama3/8/norm_nlr/comb_1/init/model.layers.13.self_attn.k_proj/_s.pt +3 -0
  41. params/llama3/8/norm_nlr/comb_1/init/model.layers.13.self_attn.o_proj/_s.pt +3 -0
  42. params/llama3/8/norm_nlr/comb_1/init/model.layers.13.self_attn.q_proj/_s.pt +3 -0
  43. params/llama3/8/norm_nlr/comb_1/init/model.layers.13.self_attn.v_proj/_s.pt +3 -0
  44. params/llama3/8/norm_nlr/comb_1/init/model.layers.14.mlp.down_proj/_s.pt +3 -0
  45. params/llama3/8/norm_nlr/comb_1/init/model.layers.14.mlp.gate_proj/_s.pt +3 -0
  46. params/llama3/8/norm_nlr/comb_1/init/model.layers.14.mlp.up_proj/_s.pt +3 -0
  47. params/llama3/8/norm_nlr/comb_1/init/model.layers.14.self_attn.k_proj/_s.pt +3 -0
  48. params/llama3/8/norm_nlr/comb_1/init/model.layers.14.self_attn.o_proj/_s.pt +3 -0
  49. params/llama3/8/norm_nlr/comb_1/init/model.layers.14.self_attn.q_proj/_s.pt +3 -0
  50. params/llama3/8/norm_nlr/comb_1/init/model.layers.14.self_attn.v_proj/_s.pt +3 -0
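
The listing follows one layout throughout: a directory per quantized module (the attention and MLP projections of every layer, plus `lm_head`), each holding a single learned scale tensor saved as `_s.pt`. As a minimal sketch of how one of these files could be fetched and inspected (the repository id below is a placeholder, and reading the tensor as a set of learned quantization scales is an assumption, not something this commit states):

```python
import torch
from huggingface_hub import hf_hub_download  # downloads the file and resolves Git LFS pointers

# Placeholder repo id -- substitute the actual repository hosting these params.
REPO_ID = "Diogo-V/llama3-learned-params"

# Path copied verbatim from the file listing above.
FILENAME = "params/llama3/8/norm_nlr/comb_1/init/model.layers.0.self_attn.q_proj/_s.pt"

local_path = hf_hub_download(repo_id=REPO_ID, filename=FILENAME)
scale = torch.load(local_path, map_location="cpu")

# Presumably a small tensor of learned scales for this projection.
print(type(scale), getattr(scale, "shape", None))
```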
params/llama3/8/norm_nlr/comb_1/init/lm_head/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bd940563dcfb4922ec37a3f5b41dc8be8ef40d2d06bc47fb8c28438d0534835a
+ size 514115
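
Each ADDED file above is tracked with Git LFS, so the diff records only a three-line pointer (spec version, sha256 oid, byte size) rather than the tensor data itself. A small sketch, assuming the file has already been downloaded to a local path, of checking it against the pointer shown for `lm_head/_s.pt`:

```python
import hashlib
from pathlib import Path

def matches_lfs_pointer(local_file: str, expected_oid: str, expected_size: int) -> bool:
    """Return True if the file's byte size and sha256 match an LFS pointer's size/oid fields."""
    data = Path(local_file).read_bytes()
    return len(data) == expected_size and hashlib.sha256(data).hexdigest() == expected_oid

# Values taken from the lm_head pointer above; the local path is hypothetical.
print(matches_lfs_pointer(
    "params/llama3/8/norm_nlr/comb_1/init/lm_head/_s.pt",
    "bd940563dcfb4922ec37a3f5b41dc8be8ef40d2d06bc47fb8c28438d0534835a",
    514115,
))
```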
params/llama3/8/norm_nlr/comb_1/init/model.layers.0.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:15e2c7d9e67ea4da226b810c8c0bc8e097207c4acfcf98b3769332eced9f0a3e
+ size 17475
params/llama3/8/norm_nlr/comb_1/init/model.layers.0.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:337cd1091b50a41f0aa282e3850f3087b805743978aa9e45edd7a497413dfe7f
+ size 58435
params/llama3/8/norm_nlr/comb_1/init/model.layers.0.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:81379c81e4b94f8a4a0cbc2ee446eada4d943259127f83568c32f46d4c7a13f7
+ size 58435
params/llama3/8/norm_nlr/comb_1/init/model.layers.0.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8a97ca6b8690ad73ce89ce8799ee20b0b55922f36956ac340e15feb3523dca88
+ size 5187
params/llama3/8/norm_nlr/comb_1/init/model.layers.0.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:427a1fb8ed8fd3d43b78d0e51f0794e2648876ef3cae02089769e508bc73d299
+ size 17475
params/llama3/8/norm_nlr/comb_1/init/model.layers.0.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b409385c173620d43b81d4a7b62aa89fecdbf0d70871592adb06cd815311b131
+ size 17475
params/llama3/8/norm_nlr/comb_1/init/model.layers.0.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fe282fabde4d7360c443b9d87269a1327de11108d643b810040ef5f729470adb
+ size 5187
params/llama3/8/norm_nlr/comb_1/init/model.layers.1.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:66619e8f70975848bb3f650c8e22f95d01f4d43501ccb4d81b66b080961625a0
+ size 17475
params/llama3/8/norm_nlr/comb_1/init/model.layers.1.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f69fc8d38de0da3ee382b25282f5ecea198f65768bc68cba6ee7f59b0c97cdb4
+ size 58435
params/llama3/8/norm_nlr/comb_1/init/model.layers.1.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fa2cb1dbc2afceab570945659941077a1bfc131f8723d4a8406ed88084b0d7e9
+ size 58435
params/llama3/8/norm_nlr/comb_1/init/model.layers.1.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bdfe36ea564462b9a753df11b3f2603d2b86c0a99e6822bcfe412bac3bf6c911
+ size 5187
params/llama3/8/norm_nlr/comb_1/init/model.layers.1.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8a7bf9fd30b8c4eb0536db41b841811508d1a77346e61760a1c0f1ea37b032d2
+ size 17475
params/llama3/8/norm_nlr/comb_1/init/model.layers.1.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9c1d8687d0e2499d4a22e829d80ae3f340bddda86363f3a0f41ce16671df5f83
+ size 17475
params/llama3/8/norm_nlr/comb_1/init/model.layers.1.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a411d580f2c55005a43074d5f8146a369b34500f5f246d70983a56f0739e2f12
+ size 5187
params/llama3/8/norm_nlr/comb_1/init/model.layers.10.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dfa1b51c567e5e7847c800e186a3737b53ac67c40c70f6e4b14192c990a193e5
+ size 17475
params/llama3/8/norm_nlr/comb_1/init/model.layers.10.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:78406ce85f555ae340a02d984beb42d8d217d7226e496e7bfa386e1ad62969de
+ size 58435
params/llama3/8/norm_nlr/comb_1/init/model.layers.10.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e01299de42db3e866c42e4d2b6658ccc1f9af3b11b4bf9e2f709795385b944a6
+ size 58435
params/llama3/8/norm_nlr/comb_1/init/model.layers.10.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3aece79208a9db4cf826fcdd2ac1ee3661840c2192337865a6aefdda07af7107
+ size 5187
params/llama3/8/norm_nlr/comb_1/init/model.layers.10.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:15beee55732c0735e22bd5a8d3ceaf1e0e530e5b99b2400cb4d79bb66ffd673a
+ size 17475
params/llama3/8/norm_nlr/comb_1/init/model.layers.10.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c961b7d173a94a902506081dbdb9bda2f216a5fcd790a59a0a5d8efcbc4b646c
+ size 17475
params/llama3/8/norm_nlr/comb_1/init/model.layers.10.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e9d64009ab958ed24167318b1874c166b067f53a2dbe4f7c6d78060cd1498897
+ size 5187
params/llama3/8/norm_nlr/comb_1/init/model.layers.11.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:07ad9a5910ee46bc861d938e47cf34af71aa0460c82a16f8339d6f6875573923
+ size 17475
params/llama3/8/norm_nlr/comb_1/init/model.layers.11.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cd204671d6452b4f60f0e09f780988c33a0beaa1e2afd2aec9ecc48ffd2e24e6
+ size 58435
params/llama3/8/norm_nlr/comb_1/init/model.layers.11.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:add789156e71e8d842f1d456db8ad507fd83a09d5f8fc8ae7752bac92bf6f56a
+ size 58435
params/llama3/8/norm_nlr/comb_1/init/model.layers.11.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ba230316ee394eb1adf2b784d65ad9956a3c73c9ddbeccd541e2f9968cea0457
+ size 5187
params/llama3/8/norm_nlr/comb_1/init/model.layers.11.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eeb2b4bfd2d8760540f8cd3f139cfddf2add4e4227d0340fdbc007815712fed0
+ size 17475
params/llama3/8/norm_nlr/comb_1/init/model.layers.11.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1c90e54c960f2a10ffb09c97fb37fbbf8ff2bb4a1a6cbd7a22bb458a7e7bb800
+ size 17475
params/llama3/8/norm_nlr/comb_1/init/model.layers.11.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:84bbd7a8271cbfdb12a54110f806f022d91d5ec930858e9cc119544bc28d3c1d
+ size 5187
params/llama3/8/norm_nlr/comb_1/init/model.layers.12.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5329809e39ad128990c2227a6cca3a2d2fb12ba75c2679273eab8adb28b2d89a
+ size 17475
params/llama3/8/norm_nlr/comb_1/init/model.layers.12.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:df6f16d21c35ef4b822ed0f0cde295801848d9f417b177b3fbc6c25ad3aefd58
+ size 58435
params/llama3/8/norm_nlr/comb_1/init/model.layers.12.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:582acc79f04eed450dbb962d4e3f56b25f3a0c897b8d7d6500e341dd31c0ef09
+ size 58435
params/llama3/8/norm_nlr/comb_1/init/model.layers.12.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d62703ffb0a33c75ef9f1e650a3c2e3da6c1973fabe8c242d9745643de31fb9f
+ size 5187
params/llama3/8/norm_nlr/comb_1/init/model.layers.12.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e585fd962bdfdc83ffddf4c114ac2e85503915ad6c6ae114e0d1e46113af331a
+ size 17475
params/llama3/8/norm_nlr/comb_1/init/model.layers.12.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3b43627e5332004d76914a894df24d46fe98338a5d413b420034d3319528cc3c
+ size 17475
params/llama3/8/norm_nlr/comb_1/init/model.layers.12.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f821ce27645ab5ae7739c948df522dbd6a95c0cf2da18350aa4da2f70f224a29
+ size 5187
params/llama3/8/norm_nlr/comb_1/init/model.layers.13.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:11130dad2dcaa51dc560795e8ea1b09030f1894b4f9805023f67c71d81ff81aa
+ size 17475
params/llama3/8/norm_nlr/comb_1/init/model.layers.13.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:401913b2ef90c0c28047dc7edb309c947b1b79bddcd4b9ce8dcbc72d85721e47
+ size 58435
params/llama3/8/norm_nlr/comb_1/init/model.layers.13.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6262465c06cd755b8845a703991a3436df7c6cd56188cb37606b25e667065867
+ size 58435
params/llama3/8/norm_nlr/comb_1/init/model.layers.13.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2facca5295dffd517f9cc36f494418c534f4ab08804e674b63c68397402e6744
+ size 5187
params/llama3/8/norm_nlr/comb_1/init/model.layers.13.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dc8bcef823c97a30c54e75f6fb53a9c4e32f9f4b40f0a71db3e0499c0f0de187
+ size 17475
params/llama3/8/norm_nlr/comb_1/init/model.layers.13.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f89f755ae4bb48bfc1c072a6886eaa158153a5faa6085811fb903f32f5adb8b9
+ size 17475
params/llama3/8/norm_nlr/comb_1/init/model.layers.13.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7b55ce0d62532eca8bb7e6988461945c582a6d0550a194f8af8e4ea6e6428c13
+ size 5187
params/llama3/8/norm_nlr/comb_1/init/model.layers.14.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ca89952766f8ebd95e9e8e9e71b78aa232ab6eeca1475207cd855aafe51046c6
+ size 17475
params/llama3/8/norm_nlr/comb_1/init/model.layers.14.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:db6f6804f52661445282e34de7194937a58662e2424e9e76df1cd51551c2aca8
+ size 58435
params/llama3/8/norm_nlr/comb_1/init/model.layers.14.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a35ec5d2441ed23c226de55de200a94efbaa7584d772c7f155b95a48c9deb7f7
+ size 58435
params/llama3/8/norm_nlr/comb_1/init/model.layers.14.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e034398aa7607ff1da4ce99be39df8b1be8b964e083844e9ace1b6517830f9fe
+ size 5187
params/llama3/8/norm_nlr/comb_1/init/model.layers.14.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3634e4cdb4171ce1ff0e114bc2bbf11847fe2fcd9d86ba6733f6fbb216464fb8
+ size 17475
params/llama3/8/norm_nlr/comb_1/init/model.layers.14.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:89ea9f302d89e97970ce4efcc644469841892282a94f5a0d0b7370b6c70116de
+ size 17475
params/llama3/8/norm_nlr/comb_1/init/model.layers.14.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c7497f31e6326cf2440afca029b55b655610d76aad8f1dae20ccbbbc98f2e389
+ size 5187