Diogo-V committed
Commit 9ae3ab2
1 Parent(s): 8b63500

Adding more results

This view is limited to 50 files because the commit contains too many changes; see the raw diff for the full change set.
Files changed (50)
  1. params/tinyllama/7/nlr_t_no_sched/comb_10/init/lm_head/_0.pt +3 -0
  2. params/tinyllama/7/nlr_t_no_sched/comb_10/init/lm_head/_s.pt +3 -0
  3. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.mlp.down_proj/_0.pt +3 -0
  4. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.mlp.down_proj/_s.pt +3 -0
  5. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.mlp.gate_proj/_0.pt +3 -0
  6. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.mlp.gate_proj/_s.pt +3 -0
  7. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.mlp.up_proj/_0.pt +3 -0
  8. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.mlp.up_proj/_s.pt +3 -0
  9. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.self_attn.k_proj/_0.pt +3 -0
  10. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.self_attn.k_proj/_s.pt +3 -0
  11. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.self_attn.o_proj/_0.pt +3 -0
  12. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.self_attn.o_proj/_s.pt +3 -0
  13. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.self_attn.q_proj/_0.pt +3 -0
  14. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.self_attn.q_proj/_s.pt +3 -0
  15. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.self_attn.v_proj/_0.pt +3 -0
  16. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.self_attn.v_proj/_s.pt +3 -0
  17. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.mlp.down_proj/_0.pt +3 -0
  18. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.mlp.down_proj/_s.pt +3 -0
  19. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.mlp.gate_proj/_0.pt +3 -0
  20. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.mlp.gate_proj/_s.pt +3 -0
  21. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.mlp.up_proj/_0.pt +3 -0
  22. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.mlp.up_proj/_s.pt +3 -0
  23. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.self_attn.k_proj/_0.pt +3 -0
  24. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.self_attn.k_proj/_s.pt +3 -0
  25. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.self_attn.o_proj/_0.pt +3 -0
  26. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.self_attn.o_proj/_s.pt +3 -0
  27. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.self_attn.q_proj/_0.pt +3 -0
  28. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.self_attn.q_proj/_s.pt +3 -0
  29. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.self_attn.v_proj/_0.pt +3 -0
  30. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.self_attn.v_proj/_s.pt +3 -0
  31. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.mlp.down_proj/_0.pt +3 -0
  32. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.mlp.down_proj/_s.pt +3 -0
  33. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.mlp.gate_proj/_0.pt +3 -0
  34. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.mlp.gate_proj/_s.pt +3 -0
  35. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.mlp.up_proj/_0.pt +3 -0
  36. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.mlp.up_proj/_s.pt +3 -0
  37. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.self_attn.k_proj/_0.pt +3 -0
  38. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.self_attn.k_proj/_s.pt +3 -0
  39. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.self_attn.o_proj/_0.pt +3 -0
  40. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.self_attn.o_proj/_s.pt +3 -0
  41. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.self_attn.q_proj/_0.pt +3 -0
  42. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.self_attn.q_proj/_s.pt +3 -0
  43. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.self_attn.v_proj/_0.pt +3 -0
  44. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.self_attn.v_proj/_s.pt +3 -0
  45. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.11.mlp.down_proj/_0.pt +3 -0
  46. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.11.mlp.down_proj/_s.pt +3 -0
  47. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.11.mlp.gate_proj/_0.pt +3 -0
  48. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.11.mlp.gate_proj/_s.pt +3 -0
  49. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.11.mlp.up_proj/_0.pt +3 -0
  50. params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.11.mlp.up_proj/_s.pt +3 -0
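Each listed module directory pairs a `_0.pt` and a `_s.pt` tensor file. As a minimal, hypothetical inspection sketch (assuming the repository has been fetched with Git LFS so these pointers resolve to real files, and that each file is a tensor saved with `torch.save`; what the `_0`/`_s` suffixes denote is not stated in this commit):

```python
import torch

# Hypothetical sketch: paths follow the file list above; run `git lfs pull`
# first so the .pt files are real tensors rather than LFS pointer stubs.
base = "params/tinyllama/7/nlr_t_no_sched/comb_10/init"

for module in ["lm_head", "model.layers.0.self_attn.q_proj"]:
    t0 = torch.load(f"{base}/{module}/_0.pt", map_location="cpu")  # meaning of _0 not documented here
    ts = torch.load(f"{base}/{module}/_s.pt", map_location="cpu")  # meaning of _s not documented here
    print(module, getattr(t0, "shape", type(t0)), getattr(ts, "shape", type(ts)))
```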
params/tinyllama/7/nlr_t_no_sched/comb_10/init/lm_head/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:aed9996b99fa85167d37b20f6ff06fdf94f84478a6856b00f15a8a219e0ac626
+ size 129091
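Each added file is a Git LFS pointer rather than the tensor itself: a `version` line, the object's `sha256` id, and its size in bytes. As an illustrative sketch (not part of this commit), a downloaded object can be checked against such a pointer:

```python
import hashlib
from pathlib import Path

# Illustrative check: an LFS pointer records the sha256 and byte size of the
# real object, so a fetched file can be verified against the pointer text.
def verify_lfs_object(pointer_path: str, object_path: str) -> bool:
    fields = dict(
        line.split(" ", 1)
        for line in Path(pointer_path).read_text().splitlines()
        if " " in line
    )
    expected_oid = fields["oid"].strip().removeprefix("sha256:")
    expected_size = int(fields["size"])
    data = Path(object_path).read_bytes()
    return len(data) == expected_size and hashlib.sha256(data).hexdigest() == expected_oid

# e.g. verify_lfs_object("lm_head_0.pointer", "lm_head/_0.pt")  # hypothetical paths
```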
params/tinyllama/7/nlr_t_no_sched/comb_10/init/lm_head/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4c3222a79d6d96809c559b3330d72d14ece2a912d9060cc3cc31bb4e387df491
+ size 129091
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.mlp.down_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8e63504265e3e90db90dacdfba488b4f80df3b784e29144329f2e394e84830fd
+ size 9283
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:66bb6f00960dad2333d0368221a67ed4fbc428b09a8c370c33fa3112b6e5fc2c
+ size 9283
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.mlp.gate_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:51cbc3c4f07b43ba20d2e94e3d80abde1da79b3b3649f0d5acc261c518bf7a03
+ size 23619
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fa5a1f841423f45e29e0dc244a2403c560758c56744757f042f902cca86c4e48
+ size 23619
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.mlp.up_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:51cbc3c4f07b43ba20d2e94e3d80abde1da79b3b3649f0d5acc261c518bf7a03
+ size 23619
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:183ddc9bde4bdb0c14d58a1de6e4a4b47832e1b6251ba19d831f04cbb7331e33
+ size 23619
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.self_attn.k_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:de6565f1e5ddf83a31c04b32977241287b6ae5917482e9c5bd0a99696a0828b5
+ size 2115
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1c6e542fd495dee711ab1ac873f19682be7d9f5a242b60e67d1546969e62b850
+ size 2115
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.self_attn.o_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8e63504265e3e90db90dacdfba488b4f80df3b784e29144329f2e394e84830fd
+ size 9283
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b94a16a6478dfd557a29e5693f657add1ddedec7ccd6a228d953523b1de37528
+ size 9283
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.self_attn.q_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8e63504265e3e90db90dacdfba488b4f80df3b784e29144329f2e394e84830fd
+ size 9283
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:308cfb6ddfafd511bbf9439d306d8070efb75f2eae995567666615807e72a429
+ size 9283
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.self_attn.v_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:de6565f1e5ddf83a31c04b32977241287b6ae5917482e9c5bd0a99696a0828b5
+ size 2115
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.0.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:33b1d55775a7ca32da65f735841bc8cb730d094d39c88de6f8d6d994ce29165b
+ size 2115
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.mlp.down_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8e63504265e3e90db90dacdfba488b4f80df3b784e29144329f2e394e84830fd
+ size 9283
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:83518c0e20230fd150f3ace219534b2ab501800e231041bd53cb776af99ab46a
+ size 9283
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.mlp.gate_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:51cbc3c4f07b43ba20d2e94e3d80abde1da79b3b3649f0d5acc261c518bf7a03
+ size 23619
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7db1ebb9f7be3cd4ded128b4db761f203e3e415c6ea0813c5528ac1681abd85e
+ size 23619
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.mlp.up_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:51cbc3c4f07b43ba20d2e94e3d80abde1da79b3b3649f0d5acc261c518bf7a03
+ size 23619
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:12f4618790dafe024ca4d0a82770de741606cabc6483e47a0b523aa2d8395d5e
+ size 23619
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.self_attn.k_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:de6565f1e5ddf83a31c04b32977241287b6ae5917482e9c5bd0a99696a0828b5
+ size 2115
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e5c34a6cffb5904e8c26f55bd2b2ca2e9d637205fb740aed52b9019768cab6ce
+ size 2115
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.self_attn.o_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8e63504265e3e90db90dacdfba488b4f80df3b784e29144329f2e394e84830fd
+ size 9283
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:9ace186fa7f0c0edee332267ca8537a7fc88e4c4a39b1ea0e5b67650b2c83ae4
+ size 9283
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.self_attn.q_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8e63504265e3e90db90dacdfba488b4f80df3b784e29144329f2e394e84830fd
+ size 9283
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0dadd0c57eef9beaa1fd6331e004abf55ffdcf418b7c142854a5fc37dc7d0ca5
+ size 9283
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.self_attn.v_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:de6565f1e5ddf83a31c04b32977241287b6ae5917482e9c5bd0a99696a0828b5
+ size 2115
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.1.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7a49ddfd3fe8c85c98de761137bc041bc69e588f9e397cf086d35d53c355546f
+ size 2115
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.mlp.down_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8e63504265e3e90db90dacdfba488b4f80df3b784e29144329f2e394e84830fd
+ size 9283
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b59ca7322eb8dc13312bf592017e9a852b84b9f9b361f0c8e159e647ccfb8061
+ size 9283
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.mlp.gate_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:51cbc3c4f07b43ba20d2e94e3d80abde1da79b3b3649f0d5acc261c518bf7a03
+ size 23619
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6610c2d5b047fe7b94dec680aa32b85df71d8809c656ef0570aa34b7932f7aee
+ size 23619
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.mlp.up_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:51cbc3c4f07b43ba20d2e94e3d80abde1da79b3b3649f0d5acc261c518bf7a03
+ size 23619
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a96a8de87c87550dd6f5e2e03bc6d3d1359bb5eb19f5338ca66d68c8592c4120
+ size 23619
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.self_attn.k_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:de6565f1e5ddf83a31c04b32977241287b6ae5917482e9c5bd0a99696a0828b5
+ size 2115
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:30642812bc6956ef9376980422b9e7129e751930fd2a8f878bdda09aef0bd9b5
+ size 2115
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.self_attn.o_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8e63504265e3e90db90dacdfba488b4f80df3b784e29144329f2e394e84830fd
+ size 9283
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4915b35b3f5f15690004e0af6bcca8dbcbef387c098e28ec068968c293113031
+ size 9283
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.self_attn.q_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8e63504265e3e90db90dacdfba488b4f80df3b784e29144329f2e394e84830fd
+ size 9283
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:75468ecbf71a558f4c140f54e660836d99846cf180837e98ed7b5160144143cc
+ size 9283
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.self_attn.v_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:de6565f1e5ddf83a31c04b32977241287b6ae5917482e9c5bd0a99696a0828b5
+ size 2115
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.10.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bedb1d8145facef285ea11403b3ea10dab155f4c8eab32339a0da0a6dfcb6e40
+ size 2115
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.11.mlp.down_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8e63504265e3e90db90dacdfba488b4f80df3b784e29144329f2e394e84830fd
+ size 9283
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.11.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e7f22025fa42eed3f57bc447e0926049caef7b371a80a0d555800ec850d9208f
+ size 9283
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.11.mlp.gate_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:51cbc3c4f07b43ba20d2e94e3d80abde1da79b3b3649f0d5acc261c518bf7a03
+ size 23619
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.11.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b3b69fb7efcd3a2b0c421aeebfa1740e445fc63df2d285e4d16445160c2c0fca
+ size 23619
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.11.mlp.up_proj/_0.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:51cbc3c4f07b43ba20d2e94e3d80abde1da79b3b3649f0d5acc261c518bf7a03
+ size 23619
params/tinyllama/7/nlr_t_no_sched/comb_10/init/model.layers.11.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a1170b0888f52da3b55d1816778afe0e368c3feb42e9006483050206d62a2c9a
+ size 23619