Diogo-V committed
Commit 95f477c
1 Parent(s): 4c00648

Upload learned parameters for llama3 in bit 8

This view is limited to 50 files because the commit contains too many changes.
Files changed (50)
  1. params/llama3/8/norm_nlr/comb_5/init/lm_head/_s.pt +3 -0
  2. params/llama3/8/norm_nlr/comb_5/init/model.layers.0.mlp.down_proj/_s.pt +3 -0
  3. params/llama3/8/norm_nlr/comb_5/init/model.layers.0.mlp.gate_proj/_s.pt +3 -0
  4. params/llama3/8/norm_nlr/comb_5/init/model.layers.0.mlp.up_proj/_s.pt +3 -0
  5. params/llama3/8/norm_nlr/comb_5/init/model.layers.0.self_attn.k_proj/_s.pt +3 -0
  6. params/llama3/8/norm_nlr/comb_5/init/model.layers.0.self_attn.o_proj/_s.pt +3 -0
  7. params/llama3/8/norm_nlr/comb_5/init/model.layers.0.self_attn.q_proj/_s.pt +3 -0
  8. params/llama3/8/norm_nlr/comb_5/init/model.layers.0.self_attn.v_proj/_s.pt +3 -0
  9. params/llama3/8/norm_nlr/comb_5/init/model.layers.1.mlp.down_proj/_s.pt +3 -0
  10. params/llama3/8/norm_nlr/comb_5/init/model.layers.1.mlp.gate_proj/_s.pt +3 -0
  11. params/llama3/8/norm_nlr/comb_5/init/model.layers.1.mlp.up_proj/_s.pt +3 -0
  12. params/llama3/8/norm_nlr/comb_5/init/model.layers.1.self_attn.k_proj/_s.pt +3 -0
  13. params/llama3/8/norm_nlr/comb_5/init/model.layers.1.self_attn.o_proj/_s.pt +3 -0
  14. params/llama3/8/norm_nlr/comb_5/init/model.layers.1.self_attn.q_proj/_s.pt +3 -0
  15. params/llama3/8/norm_nlr/comb_5/init/model.layers.1.self_attn.v_proj/_s.pt +3 -0
  16. params/llama3/8/norm_nlr/comb_5/init/model.layers.10.mlp.down_proj/_s.pt +3 -0
  17. params/llama3/8/norm_nlr/comb_5/init/model.layers.10.mlp.gate_proj/_s.pt +3 -0
  18. params/llama3/8/norm_nlr/comb_5/init/model.layers.10.mlp.up_proj/_s.pt +3 -0
  19. params/llama3/8/norm_nlr/comb_5/init/model.layers.10.self_attn.k_proj/_s.pt +3 -0
  20. params/llama3/8/norm_nlr/comb_5/init/model.layers.10.self_attn.o_proj/_s.pt +3 -0
  21. params/llama3/8/norm_nlr/comb_5/init/model.layers.10.self_attn.q_proj/_s.pt +3 -0
  22. params/llama3/8/norm_nlr/comb_5/init/model.layers.10.self_attn.v_proj/_s.pt +3 -0
  23. params/llama3/8/norm_nlr/comb_5/init/model.layers.11.mlp.down_proj/_s.pt +3 -0
  24. params/llama3/8/norm_nlr/comb_5/init/model.layers.11.mlp.gate_proj/_s.pt +3 -0
  25. params/llama3/8/norm_nlr/comb_5/init/model.layers.11.mlp.up_proj/_s.pt +3 -0
  26. params/llama3/8/norm_nlr/comb_5/init/model.layers.11.self_attn.k_proj/_s.pt +3 -0
  27. params/llama3/8/norm_nlr/comb_5/init/model.layers.11.self_attn.o_proj/_s.pt +3 -0
  28. params/llama3/8/norm_nlr/comb_5/init/model.layers.11.self_attn.q_proj/_s.pt +3 -0
  29. params/llama3/8/norm_nlr/comb_5/init/model.layers.11.self_attn.v_proj/_s.pt +3 -0
  30. params/llama3/8/norm_nlr/comb_5/init/model.layers.12.mlp.down_proj/_s.pt +3 -0
  31. params/llama3/8/norm_nlr/comb_5/init/model.layers.12.mlp.gate_proj/_s.pt +3 -0
  32. params/llama3/8/norm_nlr/comb_5/init/model.layers.12.mlp.up_proj/_s.pt +3 -0
  33. params/llama3/8/norm_nlr/comb_5/init/model.layers.12.self_attn.k_proj/_s.pt +3 -0
  34. params/llama3/8/norm_nlr/comb_5/init/model.layers.12.self_attn.o_proj/_s.pt +3 -0
  35. params/llama3/8/norm_nlr/comb_5/init/model.layers.12.self_attn.q_proj/_s.pt +3 -0
  36. params/llama3/8/norm_nlr/comb_5/init/model.layers.12.self_attn.v_proj/_s.pt +3 -0
  37. params/llama3/8/norm_nlr/comb_5/init/model.layers.13.mlp.down_proj/_s.pt +3 -0
  38. params/llama3/8/norm_nlr/comb_5/init/model.layers.13.mlp.gate_proj/_s.pt +3 -0
  39. params/llama3/8/norm_nlr/comb_5/init/model.layers.13.mlp.up_proj/_s.pt +3 -0
  40. params/llama3/8/norm_nlr/comb_5/init/model.layers.13.self_attn.k_proj/_s.pt +3 -0
  41. params/llama3/8/norm_nlr/comb_5/init/model.layers.13.self_attn.o_proj/_s.pt +3 -0
  42. params/llama3/8/norm_nlr/comb_5/init/model.layers.13.self_attn.q_proj/_s.pt +3 -0
  43. params/llama3/8/norm_nlr/comb_5/init/model.layers.13.self_attn.v_proj/_s.pt +3 -0
  44. params/llama3/8/norm_nlr/comb_5/init/model.layers.14.mlp.down_proj/_s.pt +3 -0
  45. params/llama3/8/norm_nlr/comb_5/init/model.layers.14.mlp.gate_proj/_s.pt +3 -0
  46. params/llama3/8/norm_nlr/comb_5/init/model.layers.14.mlp.up_proj/_s.pt +3 -0
  47. params/llama3/8/norm_nlr/comb_5/init/model.layers.14.self_attn.k_proj/_s.pt +3 -0
  48. params/llama3/8/norm_nlr/comb_5/init/model.layers.14.self_attn.o_proj/_s.pt +3 -0
  49. params/llama3/8/norm_nlr/comb_5/init/model.layers.14.self_attn.q_proj/_s.pt +3 -0
  50. params/llama3/8/norm_nlr/comb_5/init/model.layers.14.self_attn.v_proj/_s.pt +3 -0
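The files listed above follow a fixed layout: one directory per linear module (lm_head plus the per-layer attention and MLP projections), each holding a single _s.pt tensor under params/llama3/8/norm_nlr/comb_5/init/. A minimal sketch of how these per-module tensors might be gathered after cloning is shown below; it assumes the repository has been cloned locally with the LFS payloads pulled, and it assumes _s.pt holds one learned (presumably quantization-scale) tensor per module, which is an inference from the paths rather than documented behaviour.

    import os
    import torch

    # Assumes a local clone and that `git lfs pull` has replaced the pointer
    # files with the actual tensor payloads.
    root = "params/llama3/8/norm_nlr/comb_5/init"

    scales = {}
    for module_name in sorted(os.listdir(root)):
        tensor_path = os.path.join(root, module_name, "_s.pt")
        if os.path.isfile(tensor_path):
            # Assumption: each _s.pt stores one learned tensor for the module
            # named by its parent directory, e.g. "lm_head" or
            # "model.layers.0.self_attn.q_proj".
            scales[module_name] = torch.load(tensor_path, map_location="cpu")

    print(f"loaded {len(scales)} per-module tensors")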
params/llama3/8/norm_nlr/comb_5/init/lm_head/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a7d157bc5d39bb677a854e910d02eea8e9e7b8d4abee3436f6a6d1073398422e
+ size 514115
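Each added file is a Git LFS pointer rather than the tensor payload itself: three lines giving the pointer-spec version, the sha256 object id, and the payload size in bytes; the actual tensors are fetched by Git LFS on checkout. As a small illustrative sketch (read_lfs_pointer is a hypothetical helper, not part of this repository), such a pointer can be parsed like this:

    def read_lfs_pointer(path):
        """Parse a Git LFS pointer file into its key/value fields."""
        fields = {}
        with open(path, "r", encoding="utf-8") as handle:
            for line in handle:
                key, _, value = line.strip().partition(" ")
                if key:
                    fields[key] = value
        return fields

    # For the lm_head pointer above this yields something like:
    # {'version': 'https://git-lfs.github.com/spec/v1',
    #  'oid': 'sha256:a7d157bc5d39bb677a854e910d02eea8e9e7b8d4abee3436f6a6d1073398422e',
    #  'size': '514115'}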
params/llama3/8/norm_nlr/comb_5/init/model.layers.0.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ce85e6cbd87ae101f143bf8f46f680baee53da9bb06c2cd21af4931ba1ccb3cc
+ size 17475
params/llama3/8/norm_nlr/comb_5/init/model.layers.0.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0578426d521333ba9895fec2cc52c0e6e916ba7a35ecdadeb969441e6630b0dc
+ size 58435
params/llama3/8/norm_nlr/comb_5/init/model.layers.0.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e53cb172cfdade669f2b955981b19bfc06aa2e98ced6b6ff85bc7bdaddc4658d
+ size 58435
params/llama3/8/norm_nlr/comb_5/init/model.layers.0.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:91effca4e078009c811a30fbe5a9c76392de706fe8469cfe10e7363933d7d9be
+ size 5187
params/llama3/8/norm_nlr/comb_5/init/model.layers.0.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8112d9c9ab65a0ce17280fd1086225c4e4e7bda6018cee8f440ed9b95853f683
+ size 17475
params/llama3/8/norm_nlr/comb_5/init/model.layers.0.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8739086578509a73463535f2114fa81e6073227726dc1c589432e8d221e25bc9
+ size 17475
params/llama3/8/norm_nlr/comb_5/init/model.layers.0.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:14b027128171a71fe38db7dc5c94424f27d59a3ca036e3b972d87d1e5c4e1f52
+ size 5187
params/llama3/8/norm_nlr/comb_5/init/model.layers.1.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f5203d39c3e8526ad5a4dff593c34094637a69c2c935ea57662c6fea7fe48b57
+ size 17475
params/llama3/8/norm_nlr/comb_5/init/model.layers.1.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:63b757cf50d223b234c1f70acb12e624bf6fdbbc9f542113d6528fde04b7b1b6
+ size 58435
params/llama3/8/norm_nlr/comb_5/init/model.layers.1.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e04c59b5621abbfb78bab184d8a919516a851db3648affc4b3ee977cd7668596
+ size 58435
params/llama3/8/norm_nlr/comb_5/init/model.layers.1.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:00952bbc3052f052bd135934ec22cd1b7be2c7689084b27c26686cd050d75e29
+ size 5187
params/llama3/8/norm_nlr/comb_5/init/model.layers.1.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6ddb4a8838db713753c6473c477deb18b23b2cc46c613304939d7b9867aab805
+ size 17475
params/llama3/8/norm_nlr/comb_5/init/model.layers.1.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:108b19711b8eba4ea73a32dee9f30b785aef23876208f04b28175847c59ea203
+ size 17475
params/llama3/8/norm_nlr/comb_5/init/model.layers.1.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5390b121ae0b5a7f84e484a1de2c4cb9a4efb11c2b2617091fca75ffa4b56a06
+ size 5187
params/llama3/8/norm_nlr/comb_5/init/model.layers.10.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:62732c3a0d9834d54274a3e0e57e481097696684e480b1def6ce751edb453931
+ size 17475
params/llama3/8/norm_nlr/comb_5/init/model.layers.10.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6385d4f40eab9f06d25f8547151dea17532390d350442c16b53d6c0ab4f53cf9
+ size 58435
params/llama3/8/norm_nlr/comb_5/init/model.layers.10.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e6f52d2411b27e4fb1b78dad295b59e8f9b26bcee4b3d77fe548710e1617b1c4
+ size 58435
params/llama3/8/norm_nlr/comb_5/init/model.layers.10.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b9611c6775bfaa4573c29dd6f5b98c576d875a0d4a0c9eb809bdc28a05b9df15
+ size 5187
params/llama3/8/norm_nlr/comb_5/init/model.layers.10.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e962203190651e73d11793ac20c28841116f4d4cbded293f690622cdf8c48520
+ size 17475
params/llama3/8/norm_nlr/comb_5/init/model.layers.10.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8f027d28f85633f0bdefbf1703f6029740aa972de94a41ae5c544780d6a8b6d5
+ size 17475
params/llama3/8/norm_nlr/comb_5/init/model.layers.10.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6648ab9f96487d8554b7200deafd2067c9145af54ea704bf258f759792a1b3b3
+ size 5187
params/llama3/8/norm_nlr/comb_5/init/model.layers.11.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e7cf2c2f070aab91da9933917a2a872f9553ab936958795bea2fcac5a1567530
+ size 17475
params/llama3/8/norm_nlr/comb_5/init/model.layers.11.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4d41dcbbb393118a05c8d1d29ee34bb2971c3a4444ee55416e19afe6ee5ec610
+ size 58435
params/llama3/8/norm_nlr/comb_5/init/model.layers.11.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8cb1fd7faf6c0a7f9e06039bc7decb1efc7e7d1ac652381071295130f172eaf3
+ size 58435
params/llama3/8/norm_nlr/comb_5/init/model.layers.11.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5639a4f02669ff2362386e9a5da15d2459682f9aa00d46921ed92550c1304b0e
+ size 5187
params/llama3/8/norm_nlr/comb_5/init/model.layers.11.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b54fa8800798c623bb36b2489e6cfb64e2ab668d60a2af03d5ef9b3e9355da6a
+ size 17475
params/llama3/8/norm_nlr/comb_5/init/model.layers.11.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b65cabb1a11763a9e1e8d95622ec98b248e11925b2f49cd9483d3fcbb105eb6f
+ size 17475
params/llama3/8/norm_nlr/comb_5/init/model.layers.11.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7dcb04764b2d02cd01518a9b09ed4137cd818ce506f104bc84cbfc9540914df8
+ size 5187
params/llama3/8/norm_nlr/comb_5/init/model.layers.12.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ecbf48cec7318f4d7c5eb627cd309207685c4d69cd6c22f02310240674663ab9
+ size 17475
params/llama3/8/norm_nlr/comb_5/init/model.layers.12.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:90f2182902e99951315bdba211dc0ee5b020815035118a42bc74199857d31964
+ size 58435
params/llama3/8/norm_nlr/comb_5/init/model.layers.12.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:14adeed4892b8f31aab883757b9e048a11b5f346347f695dae327d78edce9d38
+ size 58435
params/llama3/8/norm_nlr/comb_5/init/model.layers.12.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3d1a229a57fb910a09694d3cc506ca8ea28019ddf193c8b734bf481a93099096
+ size 5187
params/llama3/8/norm_nlr/comb_5/init/model.layers.12.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d74e9f64ff5e99544114f0403df3dd2051e9b80bccbefd7192b907b1d827f9c9
+ size 17475
params/llama3/8/norm_nlr/comb_5/init/model.layers.12.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d0d743057ae90cb8397b4e041396f8fd12c05aff7cb64186a871f711fcfbda94
+ size 17475
params/llama3/8/norm_nlr/comb_5/init/model.layers.12.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c14b17569cf46d650b47021b000364b8e82c994a2f6ea1076c9ff17d59dc9250
+ size 5187
params/llama3/8/norm_nlr/comb_5/init/model.layers.13.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a17d70c48ab1cfefdb67f62c716ff913797f775c97f6caf6518a67a65a9b2131
+ size 17475
params/llama3/8/norm_nlr/comb_5/init/model.layers.13.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:233b1239bab76bb22d923cddc3af46ccc562984b29771dd402ffa9ebc8ee19dd
+ size 58435
params/llama3/8/norm_nlr/comb_5/init/model.layers.13.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c3569ad8b800b35ad52f95d33d719ab3e1e1d3818ddcdb1dde48fbcf21e9294e
+ size 58435
params/llama3/8/norm_nlr/comb_5/init/model.layers.13.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0edac497339c45060d24e12c75b06a002dbc2d13bfe0faee69badec5fd10f0c4
+ size 5187
params/llama3/8/norm_nlr/comb_5/init/model.layers.13.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6d27148ed2bcfe3883e55951ebcbc06b57cea4cd98be37c46486f0aae8bc1f47
+ size 17475
params/llama3/8/norm_nlr/comb_5/init/model.layers.13.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d433f0a92c09f58cd000aebfd48972020d1d926cff272380be833e90aca7167b
+ size 17475
params/llama3/8/norm_nlr/comb_5/init/model.layers.13.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e7b26bff29e52af2a540c42933371cfea417225972e817b9e22747347480d9bd
+ size 5187
params/llama3/8/norm_nlr/comb_5/init/model.layers.14.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:35725ad12e09ffd91f0804d095dd0b9fc2ec5fa7b82d6c2b7bec062fda1a046f
+ size 17475
params/llama3/8/norm_nlr/comb_5/init/model.layers.14.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:eee632c26a4544956e0bd4fb261f190ee586c36b20a3eead4962d8f2fff57184
+ size 58435
params/llama3/8/norm_nlr/comb_5/init/model.layers.14.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:778bf235c1c697e609bd723602a4791caccd3816fc6ffbb1a85fdc0b7e9ab8c3
+ size 58435
params/llama3/8/norm_nlr/comb_5/init/model.layers.14.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3c07347f636289837fee3848a9d1d91128362c3dc5d255bc87f579bfdb3c98e8
+ size 5187
params/llama3/8/norm_nlr/comb_5/init/model.layers.14.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:09b16c99d3caafa0ecae3c4fd4929a6ff630467ad78d01e7ac7bc6ac8c90bfd1
+ size 17475
params/llama3/8/norm_nlr/comb_5/init/model.layers.14.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c0e61e9b5574c782c48fae4c8cb44fbe425d5fcde2122423c9fff80734f29708
+ size 17475
params/llama3/8/norm_nlr/comb_5/init/model.layers.14.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:64583fae2eb264810008957993868cc0b6bd2cb4976bd7361b80bc864f25e4c5
+ size 5187