Diogo-V committed on
Commit 8b63500
1 Parent(s): 71f9ed5

Adding more results

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50)
  1. params/tinyllama/8/nlr_t_no_sched/comb_8/init/lm_head/_s.pt +3 -0
  2. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.0.mlp.down_proj/_s.pt +3 -0
  3. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.0.mlp.gate_proj/_s.pt +3 -0
  4. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.0.mlp.up_proj/_s.pt +3 -0
  5. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.0.self_attn.k_proj/_s.pt +3 -0
  6. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.0.self_attn.o_proj/_s.pt +3 -0
  7. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.0.self_attn.q_proj/_s.pt +3 -0
  8. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.0.self_attn.v_proj/_s.pt +3 -0
  9. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.1.mlp.down_proj/_s.pt +3 -0
  10. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.1.mlp.gate_proj/_s.pt +3 -0
  11. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.1.mlp.up_proj/_s.pt +3 -0
  12. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.1.self_attn.k_proj/_s.pt +3 -0
  13. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.1.self_attn.o_proj/_s.pt +3 -0
  14. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.1.self_attn.q_proj/_s.pt +3 -0
  15. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.1.self_attn.v_proj/_s.pt +3 -0
  16. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.10.mlp.down_proj/_s.pt +3 -0
  17. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.10.mlp.gate_proj/_s.pt +3 -0
  18. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.10.mlp.up_proj/_s.pt +3 -0
  19. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.10.self_attn.k_proj/_s.pt +3 -0
  20. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.10.self_attn.o_proj/_s.pt +3 -0
  21. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.10.self_attn.q_proj/_s.pt +3 -0
  22. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.10.self_attn.v_proj/_s.pt +3 -0
  23. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.11.mlp.down_proj/_s.pt +3 -0
  24. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.11.mlp.gate_proj/_s.pt +3 -0
  25. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.11.mlp.up_proj/_s.pt +3 -0
  26. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.11.self_attn.k_proj/_s.pt +3 -0
  27. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.11.self_attn.o_proj/_s.pt +3 -0
  28. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.11.self_attn.q_proj/_s.pt +3 -0
  29. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.11.self_attn.v_proj/_s.pt +3 -0
  30. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.12.mlp.down_proj/_s.pt +3 -0
  31. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.12.mlp.gate_proj/_s.pt +3 -0
  32. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.12.mlp.up_proj/_s.pt +3 -0
  33. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.12.self_attn.k_proj/_s.pt +3 -0
  34. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.12.self_attn.o_proj/_s.pt +3 -0
  35. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.12.self_attn.q_proj/_s.pt +3 -0
  36. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.12.self_attn.v_proj/_s.pt +3 -0
  37. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.13.mlp.down_proj/_s.pt +3 -0
  38. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.13.mlp.gate_proj/_s.pt +3 -0
  39. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.13.mlp.up_proj/_s.pt +3 -0
  40. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.13.self_attn.k_proj/_s.pt +3 -0
  41. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.13.self_attn.o_proj/_s.pt +3 -0
  42. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.13.self_attn.q_proj/_s.pt +3 -0
  43. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.13.self_attn.v_proj/_s.pt +3 -0
  44. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.14.mlp.down_proj/_s.pt +3 -0
  45. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.14.mlp.gate_proj/_s.pt +3 -0
  46. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.14.mlp.up_proj/_s.pt +3 -0
  47. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.14.self_attn.k_proj/_s.pt +3 -0
  48. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.14.self_attn.o_proj/_s.pt +3 -0
  49. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.14.self_attn.q_proj/_s.pt +3 -0
  50. params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.14.self_attn.v_proj/_s.pt +3 -0
params/tinyllama/8/nlr_t_no_sched/comb_8/init/lm_head/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cabe188c502c25ccf81ffa10fd5ebd00e7255ab93d0cbb5370f6d47492782c40
+size 129091
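
Each file added in this commit is stored through Git LFS, so the three `+` lines in every hunk are the LFS pointer (spec version, SHA-256 object id, object size in bytes) rather than the tensor data itself. Below is a minimal sketch, assuming a local checkout of this repo, of checking whether a given `_s.pt` is still an unresolved pointer or has already been fetched and can be loaded with `torch.load`; treating `_s.pt` as a per-module scale tensor is only a guess from the file naming, not something the diff confirms.

```python
# Minimal sketch (not part of the commit): inspect one of the files added above.
from pathlib import Path

import torch

path = Path("params/tinyllama/8/nlr_t_no_sched/comb_8/init/lm_head/_s.pt")
head = path.read_bytes()[:512]

if head.startswith(b"version https://git-lfs.github.com/spec/v1"):
    # Git LFS has not resolved the pointer yet (e.g. `git lfs pull` was not run):
    # the file on disk is just the three pointer lines shown in this diff.
    pointer = dict(line.split(" ", 1) for line in head.decode().splitlines() if line)
    print("still an LFS pointer:", pointer["oid"], pointer["size"], "bytes")
else:
    # The pointer has been replaced by the real object, so load it as a torch file.
    tensor = torch.load(path, map_location="cpu")
    print(type(tensor), getattr(tensor, "shape", None))
```
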
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.0.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9eba5629546870a88692cf14c55cc7f571f1804b38f3fa7887174cc616236187
+size 9283
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.0.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7d8fd74e8d67f713abeba705cf47f182d9c8330224ec71c0ab1aae2fb6766d9c
+size 23619
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.0.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5f9f8ab4fe5a0ffc66f10471080ab61addb0fec1c6ed4c7519007853cc1fc860
+size 23619
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.0.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ef859b2e0c194c6c5ac59c9f2aa2459e4166bc68865b3cd1d0a6be4ed50c567c
+size 2115
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.0.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:148a7b02dcd848d47c84a353df05d0d3e8a1954d2ee8e04ad0977197cba670a3
+size 9283
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.0.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c75001acc17bdd582ca9e98cc82d583d903e6518bdb107ee262326399f53fb9b
+size 9283
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.0.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:424edb20e4842eb142a2df8fa4cf9fc72fe77f8c8e8201d8a6552e1e21a7882d
+size 2115
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.1.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2a37a5e0c3ca4913e02eb948d0833900f7cf293ab04ff350526e11dfbeb81fec
+size 9283
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.1.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:12b4f835478bfdef2991beed54edcb9ec2966457510e6e09c66359c868e921b6
+size 23619
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.1.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a312c0ea726f5fa385c2c5da55112d5ab0259854a75f82e2f5db12c820132243
+size 23619
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.1.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3501fffd12054a6df2cafe47e79410609ecc05376cb3173ba8154a935e20a864
+size 2115
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.1.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c985bef4f103348679156a67125ec5d1c5a3ab2c68cb07d60ec8fb9705fe600e
+size 9283
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.1.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:44ed707b0a2764614e8b11981ba9f3cfa5416bf9957df5edd5fa01c68d9a25ee
+size 9283
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.1.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fd09e9624578b6af604cb11dd5e54a07dee1cd473495088ab4a998189274aa23
+size 2115
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.10.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4acb1b5e3f0e4572629ffd16a48d21ce3abc9a262ad475b7149a7b134479d7c8
+size 9283
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.10.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ead1507ee42fbec012396ef59c0ed16bcc9e056ea26c929b9e56ff7b454d327d
+size 23619
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.10.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6243f1826d051f1cf32c447d45a90a8c73d30512d403520e15c672672f5c11d6
+size 23619
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.10.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:60f28246c09ba2707e47b49cd91eec5565b8cde5a668e0beec9ddb36a0d96d80
+size 2115
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.10.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3fc90ba8a0094998a98980ba26f8e311c37948eab374ab62110b29e014a163c6
+size 9283
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.10.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:07d040858af0e6f2bd6b288f3d0f380605d7c8c68a067bbb25980b6cc5e8a7a0
+size 9283
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.10.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:45f29613d4c2ca3b54482559103bcbae84bd4cc7243c2ec80a780b1024be6ed4
+size 2115
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.11.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4ce39b48c2e901a1b9e1542d22f3a376c79388b1645d7e436b4dab2486e6b13e
+size 9283
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.11.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:78ff8f07705f4a1a39dd989c08aaee2bec04da08a717904cbdfcff9f0b826a3c
+size 23619
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.11.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d169b8c8f0a2a4d4782d582fd1c8b2bc26e29048456ff28e76ee13143a84cda0
+size 23619
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.11.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:58d9de4c35b70f7fcd6037b9bd5f4a03669dc8932f848902726208a7392c39aa
+size 2115
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.11.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:be159831e30e6ae0e9c5897200fc229620188162ec2811192610cad32bf8e8ec
+size 9283
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.11.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:39eb935938a45f7a32067dc8546c94544bbdffa3925604780463a7a7b77f207c
+size 9283
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.11.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:37ef247e4f478c7ad3c022caac00d3f7bef631fc5c46a5a2d3cabac1fe075098
+size 2115
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.12.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f195f6d11561a1a0a84531a81e793557389469faab79d950e8c50bb8a8478309
+size 9283
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.12.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:96eec2f48aed77a0841e392b0b541e4063ba9d9d0c7f38e31057029758749d55
+size 23619
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.12.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:08c68dfd75954b6e7268ec0e291806d60a3abe13d1a08f6cfc8c0285c04c4e11
+size 23619
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.12.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:439232fcfecf3813638e9080d38e55120267d379a9c59fb70542e71d70e45a48
+size 2115
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.12.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d32a3079bc9f8bad5917e47927787c189b64191353d62c2468ed0c3568b44009
+size 9283
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.12.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:17a9bc6756df1f30eeebfa1738c9a14b2521ffaf22a9f55a1da82b2d3710412a
+size 9283
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.12.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a67c9835f49cec02423316474db80883052ce2f4861743108f4d2b9446475fd7
+size 2115
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.13.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a6609488d8cf97639bd9b022d12b3f11d158e2c75838193b216281fc98770bab
+size 9283
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.13.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cf091f6e0740944db75a861b238191af613466acd90f3414069606841cef9627
+size 23619
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.13.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f01e32d761d5260807f771a4d2062dc0949be386aa37ed008f26fce34d9db5b3
+size 23619
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.13.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0519b8ddba97fa1d775a833436aeb58731cb347a4709dc0d73daded5d64424f7
+size 2115
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.13.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7edd2d906006080cbdd6e83866a74cd3d4599d68412711664e448134de7748da
+size 9283
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.13.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6d68ca09e8ed0f08ff4be0dba684c5db4d88c531b71bfcfa6724af06b5311e89
+size 9283
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.13.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2f718c2675820c5c3517473df37f30ad83b457ec36002845db0b66f7e26fc6c9
+size 2115
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.14.mlp.down_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ee272627de606dbda53d346fb10bcd9f05e7c0fa1fe31017ef647b6ef09fcc6f
+size 9283
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.14.mlp.gate_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:18575755eef4aed7f921c50b84c490060a5bf92fca6c198d716eb4a6063e1ef9
+size 23619
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.14.mlp.up_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2b4a339230cec9397d1c50d1e6fbe476c2cc1feb78c0ac6f04dc112c8006543e
+size 23619
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.14.self_attn.k_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:696000a913f87655311df68480aec767afd467b9488c4765ff29554cb2cb67c9
+size 2115
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.14.self_attn.o_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6a8adb890d96e4db84aab37a82f3b47ffd2f44eb9f9d724e4c72d1556a34f1c1
+size 9283
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.14.self_attn.q_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4f2e432883d66180d1a9e388311960703d5c51098ed899963c95eef7a70918fd
+size 9283
params/tinyllama/8/nlr_t_no_sched/comb_8/init/model.layers.14.self_attn.v_proj/_s.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1c9b472624b7f257759862b821bfac620e84801b42c54962e0b9e58f105e7f1c
+size 2115