pglo committed
Commit c4e58e9
1 Parent(s): 138bd64

Upload folder using huggingface_hub

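The commit message points to huggingface_hub's folder-upload API. As a minimal sketch (the local path and repo id below are placeholders, not taken from this commit), a push like this one can be made with `HfApi.upload_folder`:

```python
# Sketch of an upload_folder call, assuming huggingface_hub is installed and the
# user is already authenticated (e.g. via `huggingface-cli login`).
# The folder path and repo id are hypothetical placeholders.
from huggingface_hub import HfApi

api = HfApi()
api.upload_folder(
    folder_path="./checkpoint",       # local directory holding the .safetensors shards and index
    repo_id="your-org/your-model",    # hypothetical target repository
    repo_type="model",
    commit_message="Upload folder using huggingface_hub",
)
```
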
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:02bfb07b081a97e16ba1222a980a4cb7bf5071981da538b0ed8247566773d870
- size 4998338056
+ oid sha256:7a6d7ef044775c8fb2ab485f33a87de2dde4a3c44968ebfe03e53617691ffd84
+ size 4998337960
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:feb5b364c9438d060ab89931bd62c7065e32495d8cd2c5aff6bea5baf5d1a724
- size 543078152
+ oid sha256:bd49c9b55dd05b309de0173327cf383bae563b8e9733476bf75257703eb583ab
+ size 543078144
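Both .safetensors diffs touch only the Git LFS pointer files (the `version`/`oid`/`size` triplet); the tensor payloads themselves live in LFS storage. As an illustrative check that is not part of this repository's tooling, and assuming the first shard has been downloaded locally under its usual name, one could confirm it matches the updated pointer like this:

```python
# Verify a downloaded shard against the oid and size recorded in the new LFS pointer above.
import hashlib
import os

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file and return its hex-encoded SHA-256 digest."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

shard = "model-00001-of-00002.safetensors"  # assumes the shard was fetched locally
assert os.path.getsize(shard) == 4998337960  # size from the updated pointer
assert sha256_of(shard) == "7a6d7ef044775c8fb2ab485f33a87de2dde4a3c44968ebfe03e53617691ffd84"  # new oid
```
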
model.safetensors.index.json CHANGED
@@ -72,7 +72,7 @@
72
  "model.mamba_layers.0.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
73
  "model.mamba_layers.0.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
74
  "model.mamba_layers.0.mamba.dt_bias": "model-00001-of-00002.safetensors",
75
- "model.mamba_layers.0.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
76
  "model.mamba_layers.0.mamba.norm.weight": "model-00001-of-00002.safetensors",
77
  "model.mamba_layers.0.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
78
  "model.mamba_layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -81,7 +81,7 @@
81
  "model.mamba_layers.1.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
82
  "model.mamba_layers.1.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
83
  "model.mamba_layers.1.mamba.dt_bias": "model-00001-of-00002.safetensors",
84
- "model.mamba_layers.1.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
85
  "model.mamba_layers.1.mamba.norm.weight": "model-00001-of-00002.safetensors",
86
  "model.mamba_layers.1.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
87
  "model.mamba_layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -90,7 +90,7 @@
90
  "model.mamba_layers.10.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
91
  "model.mamba_layers.10.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
92
  "model.mamba_layers.10.mamba.dt_bias": "model-00001-of-00002.safetensors",
93
- "model.mamba_layers.10.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
94
  "model.mamba_layers.10.mamba.norm.weight": "model-00001-of-00002.safetensors",
95
  "model.mamba_layers.10.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
96
  "model.mamba_layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -99,7 +99,7 @@
99
  "model.mamba_layers.11.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
100
  "model.mamba_layers.11.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
101
  "model.mamba_layers.11.mamba.dt_bias": "model-00001-of-00002.safetensors",
102
- "model.mamba_layers.11.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
103
  "model.mamba_layers.11.mamba.norm.weight": "model-00001-of-00002.safetensors",
104
  "model.mamba_layers.11.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
105
  "model.mamba_layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -108,7 +108,7 @@
108
  "model.mamba_layers.12.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
109
  "model.mamba_layers.12.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
110
  "model.mamba_layers.12.mamba.dt_bias": "model-00001-of-00002.safetensors",
111
- "model.mamba_layers.12.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
112
  "model.mamba_layers.12.mamba.norm.weight": "model-00001-of-00002.safetensors",
113
  "model.mamba_layers.12.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
114
  "model.mamba_layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -117,7 +117,7 @@
117
  "model.mamba_layers.13.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
118
  "model.mamba_layers.13.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
119
  "model.mamba_layers.13.mamba.dt_bias": "model-00001-of-00002.safetensors",
120
- "model.mamba_layers.13.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
121
  "model.mamba_layers.13.mamba.norm.weight": "model-00001-of-00002.safetensors",
122
  "model.mamba_layers.13.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
123
  "model.mamba_layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -126,7 +126,7 @@
126
  "model.mamba_layers.14.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
127
  "model.mamba_layers.14.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
128
  "model.mamba_layers.14.mamba.dt_bias": "model-00001-of-00002.safetensors",
129
- "model.mamba_layers.14.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
130
  "model.mamba_layers.14.mamba.norm.weight": "model-00001-of-00002.safetensors",
131
  "model.mamba_layers.14.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
132
  "model.mamba_layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -135,7 +135,7 @@
135
  "model.mamba_layers.15.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
136
  "model.mamba_layers.15.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
137
  "model.mamba_layers.15.mamba.dt_bias": "model-00001-of-00002.safetensors",
138
- "model.mamba_layers.15.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
139
  "model.mamba_layers.15.mamba.norm.weight": "model-00001-of-00002.safetensors",
140
  "model.mamba_layers.15.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
141
  "model.mamba_layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -144,7 +144,7 @@
144
  "model.mamba_layers.16.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
145
  "model.mamba_layers.16.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
146
  "model.mamba_layers.16.mamba.dt_bias": "model-00001-of-00002.safetensors",
147
- "model.mamba_layers.16.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
148
  "model.mamba_layers.16.mamba.norm.weight": "model-00001-of-00002.safetensors",
149
  "model.mamba_layers.16.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
150
  "model.mamba_layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -153,7 +153,7 @@
153
  "model.mamba_layers.17.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
154
  "model.mamba_layers.17.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
155
  "model.mamba_layers.17.mamba.dt_bias": "model-00001-of-00002.safetensors",
156
- "model.mamba_layers.17.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
157
  "model.mamba_layers.17.mamba.norm.weight": "model-00001-of-00002.safetensors",
158
  "model.mamba_layers.17.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
159
  "model.mamba_layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -162,7 +162,7 @@
162
  "model.mamba_layers.18.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
163
  "model.mamba_layers.18.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
164
  "model.mamba_layers.18.mamba.dt_bias": "model-00001-of-00002.safetensors",
165
- "model.mamba_layers.18.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
166
  "model.mamba_layers.18.mamba.norm.weight": "model-00001-of-00002.safetensors",
167
  "model.mamba_layers.18.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
168
  "model.mamba_layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -171,7 +171,7 @@
171
  "model.mamba_layers.19.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
172
  "model.mamba_layers.19.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
173
  "model.mamba_layers.19.mamba.dt_bias": "model-00001-of-00002.safetensors",
174
- "model.mamba_layers.19.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
175
  "model.mamba_layers.19.mamba.norm.weight": "model-00001-of-00002.safetensors",
176
  "model.mamba_layers.19.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
177
  "model.mamba_layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -180,7 +180,7 @@
180
  "model.mamba_layers.2.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
181
  "model.mamba_layers.2.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
182
  "model.mamba_layers.2.mamba.dt_bias": "model-00001-of-00002.safetensors",
183
- "model.mamba_layers.2.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
184
  "model.mamba_layers.2.mamba.norm.weight": "model-00001-of-00002.safetensors",
185
  "model.mamba_layers.2.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
186
  "model.mamba_layers.20.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -189,7 +189,7 @@
189
  "model.mamba_layers.20.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
190
  "model.mamba_layers.20.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
191
  "model.mamba_layers.20.mamba.dt_bias": "model-00001-of-00002.safetensors",
192
- "model.mamba_layers.20.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
193
  "model.mamba_layers.20.mamba.norm.weight": "model-00001-of-00002.safetensors",
194
  "model.mamba_layers.20.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
195
  "model.mamba_layers.21.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -198,7 +198,7 @@
198
  "model.mamba_layers.21.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
199
  "model.mamba_layers.21.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
200
  "model.mamba_layers.21.mamba.dt_bias": "model-00001-of-00002.safetensors",
201
- "model.mamba_layers.21.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
202
  "model.mamba_layers.21.mamba.norm.weight": "model-00001-of-00002.safetensors",
203
  "model.mamba_layers.21.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
204
  "model.mamba_layers.22.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -207,7 +207,7 @@
207
  "model.mamba_layers.22.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
208
  "model.mamba_layers.22.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
209
  "model.mamba_layers.22.mamba.dt_bias": "model-00001-of-00002.safetensors",
210
- "model.mamba_layers.22.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
211
  "model.mamba_layers.22.mamba.norm.weight": "model-00001-of-00002.safetensors",
212
  "model.mamba_layers.22.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
213
  "model.mamba_layers.23.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -216,7 +216,7 @@
216
  "model.mamba_layers.23.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
217
  "model.mamba_layers.23.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
218
  "model.mamba_layers.23.mamba.dt_bias": "model-00001-of-00002.safetensors",
219
- "model.mamba_layers.23.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
220
  "model.mamba_layers.23.mamba.norm.weight": "model-00001-of-00002.safetensors",
221
  "model.mamba_layers.23.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
222
  "model.mamba_layers.24.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -225,7 +225,7 @@
225
  "model.mamba_layers.24.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
226
  "model.mamba_layers.24.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
227
  "model.mamba_layers.24.mamba.dt_bias": "model-00001-of-00002.safetensors",
228
- "model.mamba_layers.24.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
229
  "model.mamba_layers.24.mamba.norm.weight": "model-00001-of-00002.safetensors",
230
  "model.mamba_layers.24.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
231
  "model.mamba_layers.25.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -234,7 +234,7 @@
234
  "model.mamba_layers.25.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
235
  "model.mamba_layers.25.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
236
  "model.mamba_layers.25.mamba.dt_bias": "model-00001-of-00002.safetensors",
237
- "model.mamba_layers.25.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
238
  "model.mamba_layers.25.mamba.norm.weight": "model-00001-of-00002.safetensors",
239
  "model.mamba_layers.25.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
240
  "model.mamba_layers.26.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -243,7 +243,7 @@
243
  "model.mamba_layers.26.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
244
  "model.mamba_layers.26.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
245
  "model.mamba_layers.26.mamba.dt_bias": "model-00001-of-00002.safetensors",
246
- "model.mamba_layers.26.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
247
  "model.mamba_layers.26.mamba.norm.weight": "model-00001-of-00002.safetensors",
248
  "model.mamba_layers.26.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
249
  "model.mamba_layers.27.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -252,7 +252,7 @@
252
  "model.mamba_layers.27.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
253
  "model.mamba_layers.27.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
254
  "model.mamba_layers.27.mamba.dt_bias": "model-00001-of-00002.safetensors",
255
- "model.mamba_layers.27.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
256
  "model.mamba_layers.27.mamba.norm.weight": "model-00001-of-00002.safetensors",
257
  "model.mamba_layers.27.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
258
  "model.mamba_layers.28.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -261,7 +261,7 @@
261
  "model.mamba_layers.28.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
262
  "model.mamba_layers.28.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
263
  "model.mamba_layers.28.mamba.dt_bias": "model-00001-of-00002.safetensors",
264
- "model.mamba_layers.28.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
265
  "model.mamba_layers.28.mamba.norm.weight": "model-00001-of-00002.safetensors",
266
  "model.mamba_layers.28.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
267
  "model.mamba_layers.29.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -270,7 +270,7 @@
270
  "model.mamba_layers.29.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
271
  "model.mamba_layers.29.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
272
  "model.mamba_layers.29.mamba.dt_bias": "model-00001-of-00002.safetensors",
273
- "model.mamba_layers.29.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
274
  "model.mamba_layers.29.mamba.norm.weight": "model-00001-of-00002.safetensors",
275
  "model.mamba_layers.29.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
276
  "model.mamba_layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -279,7 +279,7 @@
279
  "model.mamba_layers.3.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
280
  "model.mamba_layers.3.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
281
  "model.mamba_layers.3.mamba.dt_bias": "model-00001-of-00002.safetensors",
282
- "model.mamba_layers.3.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
283
  "model.mamba_layers.3.mamba.norm.weight": "model-00001-of-00002.safetensors",
284
  "model.mamba_layers.3.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
285
  "model.mamba_layers.30.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -288,7 +288,7 @@
288
  "model.mamba_layers.30.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
289
  "model.mamba_layers.30.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
290
  "model.mamba_layers.30.mamba.dt_bias": "model-00001-of-00002.safetensors",
291
- "model.mamba_layers.30.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
292
  "model.mamba_layers.30.mamba.norm.weight": "model-00001-of-00002.safetensors",
293
  "model.mamba_layers.30.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
294
  "model.mamba_layers.31.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -297,7 +297,7 @@
297
  "model.mamba_layers.31.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
298
  "model.mamba_layers.31.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
299
  "model.mamba_layers.31.mamba.dt_bias": "model-00001-of-00002.safetensors",
300
- "model.mamba_layers.31.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
301
  "model.mamba_layers.31.mamba.norm.weight": "model-00001-of-00002.safetensors",
302
  "model.mamba_layers.31.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
303
  "model.mamba_layers.32.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -306,7 +306,7 @@
306
  "model.mamba_layers.32.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
307
  "model.mamba_layers.32.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
308
  "model.mamba_layers.32.mamba.dt_bias": "model-00001-of-00002.safetensors",
309
- "model.mamba_layers.32.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
310
  "model.mamba_layers.32.mamba.norm.weight": "model-00001-of-00002.safetensors",
311
  "model.mamba_layers.32.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
312
  "model.mamba_layers.33.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -315,7 +315,7 @@
315
  "model.mamba_layers.33.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
316
  "model.mamba_layers.33.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
317
  "model.mamba_layers.33.mamba.dt_bias": "model-00001-of-00002.safetensors",
318
- "model.mamba_layers.33.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
319
  "model.mamba_layers.33.mamba.norm.weight": "model-00001-of-00002.safetensors",
320
  "model.mamba_layers.33.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
321
  "model.mamba_layers.34.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -324,7 +324,7 @@
324
  "model.mamba_layers.34.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
325
  "model.mamba_layers.34.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
326
  "model.mamba_layers.34.mamba.dt_bias": "model-00001-of-00002.safetensors",
327
- "model.mamba_layers.34.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
328
  "model.mamba_layers.34.mamba.norm.weight": "model-00001-of-00002.safetensors",
329
  "model.mamba_layers.34.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
330
  "model.mamba_layers.35.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -333,7 +333,7 @@
333
  "model.mamba_layers.35.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
334
  "model.mamba_layers.35.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
335
  "model.mamba_layers.35.mamba.dt_bias": "model-00001-of-00002.safetensors",
336
- "model.mamba_layers.35.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
337
  "model.mamba_layers.35.mamba.norm.weight": "model-00001-of-00002.safetensors",
338
  "model.mamba_layers.35.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
339
  "model.mamba_layers.36.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -342,7 +342,7 @@
342
  "model.mamba_layers.36.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
343
  "model.mamba_layers.36.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
344
  "model.mamba_layers.36.mamba.dt_bias": "model-00001-of-00002.safetensors",
345
- "model.mamba_layers.36.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
346
  "model.mamba_layers.36.mamba.norm.weight": "model-00001-of-00002.safetensors",
347
  "model.mamba_layers.36.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
348
  "model.mamba_layers.37.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -351,7 +351,7 @@
351
  "model.mamba_layers.37.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
352
  "model.mamba_layers.37.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
353
  "model.mamba_layers.37.mamba.dt_bias": "model-00001-of-00002.safetensors",
354
- "model.mamba_layers.37.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
355
  "model.mamba_layers.37.mamba.norm.weight": "model-00001-of-00002.safetensors",
356
  "model.mamba_layers.37.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
357
  "model.mamba_layers.38.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -360,7 +360,7 @@
360
  "model.mamba_layers.38.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
361
  "model.mamba_layers.38.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
362
  "model.mamba_layers.38.mamba.dt_bias": "model-00001-of-00002.safetensors",
363
- "model.mamba_layers.38.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
364
  "model.mamba_layers.38.mamba.norm.weight": "model-00001-of-00002.safetensors",
365
  "model.mamba_layers.38.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
366
  "model.mamba_layers.39.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -369,7 +369,7 @@
369
  "model.mamba_layers.39.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
370
  "model.mamba_layers.39.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
371
  "model.mamba_layers.39.mamba.dt_bias": "model-00001-of-00002.safetensors",
372
- "model.mamba_layers.39.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
373
  "model.mamba_layers.39.mamba.norm.weight": "model-00001-of-00002.safetensors",
374
  "model.mamba_layers.39.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
375
  "model.mamba_layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -378,7 +378,7 @@
378
  "model.mamba_layers.4.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
379
  "model.mamba_layers.4.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
380
  "model.mamba_layers.4.mamba.dt_bias": "model-00001-of-00002.safetensors",
381
- "model.mamba_layers.4.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
382
  "model.mamba_layers.4.mamba.norm.weight": "model-00001-of-00002.safetensors",
383
  "model.mamba_layers.4.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
384
  "model.mamba_layers.40.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -387,7 +387,7 @@
387
  "model.mamba_layers.40.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
388
  "model.mamba_layers.40.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
389
  "model.mamba_layers.40.mamba.dt_bias": "model-00001-of-00002.safetensors",
390
- "model.mamba_layers.40.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
391
  "model.mamba_layers.40.mamba.norm.weight": "model-00001-of-00002.safetensors",
392
  "model.mamba_layers.40.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
393
  "model.mamba_layers.41.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -396,7 +396,7 @@
396
  "model.mamba_layers.41.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
397
  "model.mamba_layers.41.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
398
  "model.mamba_layers.41.mamba.dt_bias": "model-00001-of-00002.safetensors",
399
- "model.mamba_layers.41.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
400
  "model.mamba_layers.41.mamba.norm.weight": "model-00001-of-00002.safetensors",
401
  "model.mamba_layers.41.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
402
  "model.mamba_layers.42.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -405,7 +405,7 @@
405
  "model.mamba_layers.42.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
406
  "model.mamba_layers.42.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
407
  "model.mamba_layers.42.mamba.dt_bias": "model-00001-of-00002.safetensors",
408
- "model.mamba_layers.42.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
409
  "model.mamba_layers.42.mamba.norm.weight": "model-00001-of-00002.safetensors",
410
  "model.mamba_layers.42.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
411
  "model.mamba_layers.43.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -414,7 +414,7 @@
414
  "model.mamba_layers.43.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
415
  "model.mamba_layers.43.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
416
  "model.mamba_layers.43.mamba.dt_bias": "model-00001-of-00002.safetensors",
417
- "model.mamba_layers.43.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
418
  "model.mamba_layers.43.mamba.norm.weight": "model-00001-of-00002.safetensors",
419
  "model.mamba_layers.43.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
420
  "model.mamba_layers.44.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -423,7 +423,7 @@
423
  "model.mamba_layers.44.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
424
  "model.mamba_layers.44.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
425
  "model.mamba_layers.44.mamba.dt_bias": "model-00001-of-00002.safetensors",
426
- "model.mamba_layers.44.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
427
  "model.mamba_layers.44.mamba.norm.weight": "model-00001-of-00002.safetensors",
428
  "model.mamba_layers.44.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
429
  "model.mamba_layers.45.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -432,7 +432,7 @@
432
  "model.mamba_layers.45.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
433
  "model.mamba_layers.45.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
434
  "model.mamba_layers.45.mamba.dt_bias": "model-00001-of-00002.safetensors",
435
- "model.mamba_layers.45.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
436
  "model.mamba_layers.45.mamba.norm.weight": "model-00001-of-00002.safetensors",
437
  "model.mamba_layers.45.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
438
  "model.mamba_layers.46.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -441,7 +441,7 @@
441
  "model.mamba_layers.46.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
442
  "model.mamba_layers.46.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
443
  "model.mamba_layers.46.mamba.dt_bias": "model-00001-of-00002.safetensors",
444
- "model.mamba_layers.46.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
445
  "model.mamba_layers.46.mamba.norm.weight": "model-00001-of-00002.safetensors",
446
  "model.mamba_layers.46.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
447
  "model.mamba_layers.47.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -450,7 +450,7 @@
450
  "model.mamba_layers.47.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
451
  "model.mamba_layers.47.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
452
  "model.mamba_layers.47.mamba.dt_bias": "model-00001-of-00002.safetensors",
453
- "model.mamba_layers.47.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
454
  "model.mamba_layers.47.mamba.norm.weight": "model-00001-of-00002.safetensors",
455
  "model.mamba_layers.47.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
456
  "model.mamba_layers.48.input_layernorm.weight": "model-00002-of-00002.safetensors",
@@ -459,7 +459,7 @@
459
  "model.mamba_layers.48.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
460
  "model.mamba_layers.48.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
461
  "model.mamba_layers.48.mamba.dt_bias": "model-00001-of-00002.safetensors",
462
- "model.mamba_layers.48.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
463
  "model.mamba_layers.48.mamba.norm.weight": "model-00001-of-00002.safetensors",
464
  "model.mamba_layers.48.mamba.out_proj.weight": "model-00002-of-00002.safetensors",
465
  "model.mamba_layers.49.input_layernorm.weight": "model-00002-of-00002.safetensors",
@@ -468,7 +468,7 @@
468
  "model.mamba_layers.49.mamba.conv1d.bias": "model-00002-of-00002.safetensors",
469
  "model.mamba_layers.49.mamba.conv1d.weight": "model-00002-of-00002.safetensors",
470
  "model.mamba_layers.49.mamba.dt_bias": "model-00002-of-00002.safetensors",
471
- "model.mamba_layers.49.mamba.in_proj.0.weight": "model-00002-of-00002.safetensors",
472
  "model.mamba_layers.49.mamba.norm.weight": "model-00002-of-00002.safetensors",
473
  "model.mamba_layers.49.mamba.out_proj.weight": "model-00002-of-00002.safetensors",
474
  "model.mamba_layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -477,7 +477,7 @@
477
  "model.mamba_layers.5.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
478
  "model.mamba_layers.5.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
479
  "model.mamba_layers.5.mamba.dt_bias": "model-00001-of-00002.safetensors",
480
- "model.mamba_layers.5.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
481
  "model.mamba_layers.5.mamba.norm.weight": "model-00001-of-00002.safetensors",
482
  "model.mamba_layers.5.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
483
  "model.mamba_layers.50.input_layernorm.weight": "model-00002-of-00002.safetensors",
@@ -486,7 +486,7 @@
486
  "model.mamba_layers.50.mamba.conv1d.bias": "model-00002-of-00002.safetensors",
487
  "model.mamba_layers.50.mamba.conv1d.weight": "model-00002-of-00002.safetensors",
488
  "model.mamba_layers.50.mamba.dt_bias": "model-00002-of-00002.safetensors",
489
- "model.mamba_layers.50.mamba.in_proj.0.weight": "model-00002-of-00002.safetensors",
490
  "model.mamba_layers.50.mamba.norm.weight": "model-00002-of-00002.safetensors",
491
  "model.mamba_layers.50.mamba.out_proj.weight": "model-00002-of-00002.safetensors",
492
  "model.mamba_layers.51.input_layernorm.weight": "model-00002-of-00002.safetensors",
@@ -495,7 +495,7 @@
495
  "model.mamba_layers.51.mamba.conv1d.bias": "model-00002-of-00002.safetensors",
496
  "model.mamba_layers.51.mamba.conv1d.weight": "model-00002-of-00002.safetensors",
497
  "model.mamba_layers.51.mamba.dt_bias": "model-00002-of-00002.safetensors",
498
- "model.mamba_layers.51.mamba.in_proj.0.weight": "model-00002-of-00002.safetensors",
499
  "model.mamba_layers.51.mamba.norm.weight": "model-00002-of-00002.safetensors",
500
  "model.mamba_layers.51.mamba.out_proj.weight": "model-00002-of-00002.safetensors",
501
  "model.mamba_layers.52.input_layernorm.weight": "model-00002-of-00002.safetensors",
@@ -504,7 +504,7 @@
504
  "model.mamba_layers.52.mamba.conv1d.bias": "model-00002-of-00002.safetensors",
505
  "model.mamba_layers.52.mamba.conv1d.weight": "model-00002-of-00002.safetensors",
506
  "model.mamba_layers.52.mamba.dt_bias": "model-00002-of-00002.safetensors",
507
- "model.mamba_layers.52.mamba.in_proj.0.weight": "model-00002-of-00002.safetensors",
508
  "model.mamba_layers.52.mamba.norm.weight": "model-00002-of-00002.safetensors",
509
  "model.mamba_layers.52.mamba.out_proj.weight": "model-00002-of-00002.safetensors",
510
  "model.mamba_layers.53.input_layernorm.weight": "model-00002-of-00002.safetensors",
@@ -513,7 +513,7 @@
513
  "model.mamba_layers.53.mamba.conv1d.bias": "model-00002-of-00002.safetensors",
514
  "model.mamba_layers.53.mamba.conv1d.weight": "model-00002-of-00002.safetensors",
515
  "model.mamba_layers.53.mamba.dt_bias": "model-00002-of-00002.safetensors",
516
- "model.mamba_layers.53.mamba.in_proj.0.weight": "model-00002-of-00002.safetensors",
517
  "model.mamba_layers.53.mamba.norm.weight": "model-00002-of-00002.safetensors",
518
  "model.mamba_layers.53.mamba.out_proj.weight": "model-00002-of-00002.safetensors",
519
  "model.mamba_layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -522,7 +522,7 @@
522
  "model.mamba_layers.6.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
523
  "model.mamba_layers.6.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
524
  "model.mamba_layers.6.mamba.dt_bias": "model-00001-of-00002.safetensors",
525
- "model.mamba_layers.6.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
526
  "model.mamba_layers.6.mamba.norm.weight": "model-00001-of-00002.safetensors",
527
  "model.mamba_layers.6.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
528
  "model.mamba_layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -531,7 +531,7 @@
531
  "model.mamba_layers.7.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
532
  "model.mamba_layers.7.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
533
  "model.mamba_layers.7.mamba.dt_bias": "model-00001-of-00002.safetensors",
534
- "model.mamba_layers.7.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
535
  "model.mamba_layers.7.mamba.norm.weight": "model-00001-of-00002.safetensors",
536
  "model.mamba_layers.7.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
537
  "model.mamba_layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -540,7 +540,7 @@
540
  "model.mamba_layers.8.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
541
  "model.mamba_layers.8.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
542
  "model.mamba_layers.8.mamba.dt_bias": "model-00001-of-00002.safetensors",
543
- "model.mamba_layers.8.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
544
  "model.mamba_layers.8.mamba.norm.weight": "model-00001-of-00002.safetensors",
545
  "model.mamba_layers.8.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
546
  "model.mamba_layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -549,8 +549,8 @@
549
  "model.mamba_layers.9.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
550
  "model.mamba_layers.9.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
551
  "model.mamba_layers.9.mamba.dt_bias": "model-00001-of-00002.safetensors",
552
- "model.mamba_layers.9.mamba.in_proj.0.weight": "model-00001-of-00002.safetensors",
553
  "model.mamba_layers.9.mamba.norm.weight": "model-00001-of-00002.safetensors",
554
  "model.mamba_layers.9.mamba.out_proj.weight": "model-00001-of-00002.safetensors"
555
  }
556
- }
 
72
  "model.mamba_layers.0.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
73
  "model.mamba_layers.0.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
74
  "model.mamba_layers.0.mamba.dt_bias": "model-00001-of-00002.safetensors",
75
+ "model.mamba_layers.0.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
76
  "model.mamba_layers.0.mamba.norm.weight": "model-00001-of-00002.safetensors",
77
  "model.mamba_layers.0.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
78
  "model.mamba_layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
81
  "model.mamba_layers.1.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
82
  "model.mamba_layers.1.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
83
  "model.mamba_layers.1.mamba.dt_bias": "model-00001-of-00002.safetensors",
84
+ "model.mamba_layers.1.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
85
  "model.mamba_layers.1.mamba.norm.weight": "model-00001-of-00002.safetensors",
86
  "model.mamba_layers.1.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
87
  "model.mamba_layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
90
  "model.mamba_layers.10.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
91
  "model.mamba_layers.10.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
92
  "model.mamba_layers.10.mamba.dt_bias": "model-00001-of-00002.safetensors",
93
+ "model.mamba_layers.10.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
94
  "model.mamba_layers.10.mamba.norm.weight": "model-00001-of-00002.safetensors",
95
  "model.mamba_layers.10.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
96
  "model.mamba_layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
99
  "model.mamba_layers.11.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
100
  "model.mamba_layers.11.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
101
  "model.mamba_layers.11.mamba.dt_bias": "model-00001-of-00002.safetensors",
102
+ "model.mamba_layers.11.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
103
  "model.mamba_layers.11.mamba.norm.weight": "model-00001-of-00002.safetensors",
104
  "model.mamba_layers.11.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
105
  "model.mamba_layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
108
  "model.mamba_layers.12.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
109
  "model.mamba_layers.12.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
110
  "model.mamba_layers.12.mamba.dt_bias": "model-00001-of-00002.safetensors",
111
+ "model.mamba_layers.12.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
112
  "model.mamba_layers.12.mamba.norm.weight": "model-00001-of-00002.safetensors",
113
  "model.mamba_layers.12.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
114
  "model.mamba_layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
117
  "model.mamba_layers.13.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
118
  "model.mamba_layers.13.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
119
  "model.mamba_layers.13.mamba.dt_bias": "model-00001-of-00002.safetensors",
120
+ "model.mamba_layers.13.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
121
  "model.mamba_layers.13.mamba.norm.weight": "model-00001-of-00002.safetensors",
122
  "model.mamba_layers.13.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
123
  "model.mamba_layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
126
  "model.mamba_layers.14.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
127
  "model.mamba_layers.14.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
128
  "model.mamba_layers.14.mamba.dt_bias": "model-00001-of-00002.safetensors",
129
+ "model.mamba_layers.14.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
130
  "model.mamba_layers.14.mamba.norm.weight": "model-00001-of-00002.safetensors",
131
  "model.mamba_layers.14.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
132
  "model.mamba_layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
135
  "model.mamba_layers.15.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
136
  "model.mamba_layers.15.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
137
  "model.mamba_layers.15.mamba.dt_bias": "model-00001-of-00002.safetensors",
138
+ "model.mamba_layers.15.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
139
  "model.mamba_layers.15.mamba.norm.weight": "model-00001-of-00002.safetensors",
140
  "model.mamba_layers.15.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
141
  "model.mamba_layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
144
  "model.mamba_layers.16.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
145
  "model.mamba_layers.16.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
146
  "model.mamba_layers.16.mamba.dt_bias": "model-00001-of-00002.safetensors",
147
+ "model.mamba_layers.16.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
148
  "model.mamba_layers.16.mamba.norm.weight": "model-00001-of-00002.safetensors",
149
  "model.mamba_layers.16.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
150
  "model.mamba_layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
153
  "model.mamba_layers.17.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
154
  "model.mamba_layers.17.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
155
  "model.mamba_layers.17.mamba.dt_bias": "model-00001-of-00002.safetensors",
156
+ "model.mamba_layers.17.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
157
  "model.mamba_layers.17.mamba.norm.weight": "model-00001-of-00002.safetensors",
158
  "model.mamba_layers.17.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
159
  "model.mamba_layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
162
  "model.mamba_layers.18.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
163
  "model.mamba_layers.18.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
164
  "model.mamba_layers.18.mamba.dt_bias": "model-00001-of-00002.safetensors",
165
+ "model.mamba_layers.18.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
166
  "model.mamba_layers.18.mamba.norm.weight": "model-00001-of-00002.safetensors",
167
  "model.mamba_layers.18.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
168
  "model.mamba_layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
171
  "model.mamba_layers.19.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
172
  "model.mamba_layers.19.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
173
  "model.mamba_layers.19.mamba.dt_bias": "model-00001-of-00002.safetensors",
174
+ "model.mamba_layers.19.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
175
  "model.mamba_layers.19.mamba.norm.weight": "model-00001-of-00002.safetensors",
176
  "model.mamba_layers.19.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
177
  "model.mamba_layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
180
  "model.mamba_layers.2.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
181
  "model.mamba_layers.2.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
182
  "model.mamba_layers.2.mamba.dt_bias": "model-00001-of-00002.safetensors",
183
+ "model.mamba_layers.2.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
184
  "model.mamba_layers.2.mamba.norm.weight": "model-00001-of-00002.safetensors",
185
  "model.mamba_layers.2.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
186
  "model.mamba_layers.20.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
189
  "model.mamba_layers.20.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
190
  "model.mamba_layers.20.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
191
  "model.mamba_layers.20.mamba.dt_bias": "model-00001-of-00002.safetensors",
192
+ "model.mamba_layers.20.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
193
  "model.mamba_layers.20.mamba.norm.weight": "model-00001-of-00002.safetensors",
194
  "model.mamba_layers.20.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
195
  "model.mamba_layers.21.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
198
  "model.mamba_layers.21.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
199
  "model.mamba_layers.21.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
200
  "model.mamba_layers.21.mamba.dt_bias": "model-00001-of-00002.safetensors",
201
+ "model.mamba_layers.21.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
202
  "model.mamba_layers.21.mamba.norm.weight": "model-00001-of-00002.safetensors",
203
  "model.mamba_layers.21.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
204
  "model.mamba_layers.22.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
207
  "model.mamba_layers.22.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
208
  "model.mamba_layers.22.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
209
  "model.mamba_layers.22.mamba.dt_bias": "model-00001-of-00002.safetensors",
210
+ "model.mamba_layers.22.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
211
  "model.mamba_layers.22.mamba.norm.weight": "model-00001-of-00002.safetensors",
212
  "model.mamba_layers.22.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
213
  "model.mamba_layers.23.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
216
  "model.mamba_layers.23.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
217
  "model.mamba_layers.23.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
218
  "model.mamba_layers.23.mamba.dt_bias": "model-00001-of-00002.safetensors",
219
+ "model.mamba_layers.23.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
220
  "model.mamba_layers.23.mamba.norm.weight": "model-00001-of-00002.safetensors",
221
  "model.mamba_layers.23.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
222
  "model.mamba_layers.24.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
225
  "model.mamba_layers.24.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
226
  "model.mamba_layers.24.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
227
  "model.mamba_layers.24.mamba.dt_bias": "model-00001-of-00002.safetensors",
228
+ "model.mamba_layers.24.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
229
  "model.mamba_layers.24.mamba.norm.weight": "model-00001-of-00002.safetensors",
230
  "model.mamba_layers.24.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
231
  "model.mamba_layers.25.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
234
  "model.mamba_layers.25.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
235
  "model.mamba_layers.25.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
236
  "model.mamba_layers.25.mamba.dt_bias": "model-00001-of-00002.safetensors",
237
+ "model.mamba_layers.25.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
238
  "model.mamba_layers.25.mamba.norm.weight": "model-00001-of-00002.safetensors",
239
  "model.mamba_layers.25.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
240
  "model.mamba_layers.26.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
243
  "model.mamba_layers.26.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
244
  "model.mamba_layers.26.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
245
  "model.mamba_layers.26.mamba.dt_bias": "model-00001-of-00002.safetensors",
246
+ "model.mamba_layers.26.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
247
  "model.mamba_layers.26.mamba.norm.weight": "model-00001-of-00002.safetensors",
248
  "model.mamba_layers.26.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
249
  "model.mamba_layers.27.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
252
  "model.mamba_layers.27.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
253
  "model.mamba_layers.27.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
254
  "model.mamba_layers.27.mamba.dt_bias": "model-00001-of-00002.safetensors",
255
+ "model.mamba_layers.27.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
256
  "model.mamba_layers.27.mamba.norm.weight": "model-00001-of-00002.safetensors",
257
  "model.mamba_layers.27.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
258
  "model.mamba_layers.28.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
261
  "model.mamba_layers.28.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
262
  "model.mamba_layers.28.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
263
  "model.mamba_layers.28.mamba.dt_bias": "model-00001-of-00002.safetensors",
264
+ "model.mamba_layers.28.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
265
  "model.mamba_layers.28.mamba.norm.weight": "model-00001-of-00002.safetensors",
266
  "model.mamba_layers.28.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
267
  "model.mamba_layers.29.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
270
  "model.mamba_layers.29.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
271
  "model.mamba_layers.29.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
272
  "model.mamba_layers.29.mamba.dt_bias": "model-00001-of-00002.safetensors",
273
+ "model.mamba_layers.29.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
274
  "model.mamba_layers.29.mamba.norm.weight": "model-00001-of-00002.safetensors",
275
  "model.mamba_layers.29.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
276
  "model.mamba_layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
279
  "model.mamba_layers.3.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
280
  "model.mamba_layers.3.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
281
  "model.mamba_layers.3.mamba.dt_bias": "model-00001-of-00002.safetensors",
282
+ "model.mamba_layers.3.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
283
  "model.mamba_layers.3.mamba.norm.weight": "model-00001-of-00002.safetensors",
284
  "model.mamba_layers.3.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
285
  "model.mamba_layers.30.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
288
  "model.mamba_layers.30.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
289
  "model.mamba_layers.30.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
290
  "model.mamba_layers.30.mamba.dt_bias": "model-00001-of-00002.safetensors",
291
+ "model.mamba_layers.30.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
292
  "model.mamba_layers.30.mamba.norm.weight": "model-00001-of-00002.safetensors",
293
  "model.mamba_layers.30.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
294
  "model.mamba_layers.31.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
297
  "model.mamba_layers.31.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
298
  "model.mamba_layers.31.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
299
  "model.mamba_layers.31.mamba.dt_bias": "model-00001-of-00002.safetensors",
300
+ "model.mamba_layers.31.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
301
  "model.mamba_layers.31.mamba.norm.weight": "model-00001-of-00002.safetensors",
302
  "model.mamba_layers.31.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
303
  "model.mamba_layers.32.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
306
  "model.mamba_layers.32.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
307
  "model.mamba_layers.32.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
308
  "model.mamba_layers.32.mamba.dt_bias": "model-00001-of-00002.safetensors",
309
+ "model.mamba_layers.32.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
310
  "model.mamba_layers.32.mamba.norm.weight": "model-00001-of-00002.safetensors",
311
  "model.mamba_layers.32.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
312
  "model.mamba_layers.33.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
315
  "model.mamba_layers.33.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
316
  "model.mamba_layers.33.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
317
  "model.mamba_layers.33.mamba.dt_bias": "model-00001-of-00002.safetensors",
318
+ "model.mamba_layers.33.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
319
  "model.mamba_layers.33.mamba.norm.weight": "model-00001-of-00002.safetensors",
320
  "model.mamba_layers.33.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
321
  "model.mamba_layers.34.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
324
  "model.mamba_layers.34.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
325
  "model.mamba_layers.34.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
326
  "model.mamba_layers.34.mamba.dt_bias": "model-00001-of-00002.safetensors",
327
+ "model.mamba_layers.34.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
328
  "model.mamba_layers.34.mamba.norm.weight": "model-00001-of-00002.safetensors",
329
  "model.mamba_layers.34.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
330
  "model.mamba_layers.35.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
333
  "model.mamba_layers.35.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
334
  "model.mamba_layers.35.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
335
  "model.mamba_layers.35.mamba.dt_bias": "model-00001-of-00002.safetensors",
336
+ "model.mamba_layers.35.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
337
  "model.mamba_layers.35.mamba.norm.weight": "model-00001-of-00002.safetensors",
338
  "model.mamba_layers.35.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
339
  "model.mamba_layers.36.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
342
  "model.mamba_layers.36.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
343
  "model.mamba_layers.36.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
344
  "model.mamba_layers.36.mamba.dt_bias": "model-00001-of-00002.safetensors",
345
+ "model.mamba_layers.36.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
346
  "model.mamba_layers.36.mamba.norm.weight": "model-00001-of-00002.safetensors",
347
  "model.mamba_layers.36.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
348
  "model.mamba_layers.37.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
351
  "model.mamba_layers.37.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
352
  "model.mamba_layers.37.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
353
  "model.mamba_layers.37.mamba.dt_bias": "model-00001-of-00002.safetensors",
354
+ "model.mamba_layers.37.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
355
  "model.mamba_layers.37.mamba.norm.weight": "model-00001-of-00002.safetensors",
356
  "model.mamba_layers.37.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
357
  "model.mamba_layers.38.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
360
  "model.mamba_layers.38.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
361
  "model.mamba_layers.38.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
362
  "model.mamba_layers.38.mamba.dt_bias": "model-00001-of-00002.safetensors",
363
+ "model.mamba_layers.38.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
364
  "model.mamba_layers.38.mamba.norm.weight": "model-00001-of-00002.safetensors",
365
  "model.mamba_layers.38.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
366
  "model.mamba_layers.39.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
369
  "model.mamba_layers.39.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
370
  "model.mamba_layers.39.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
371
  "model.mamba_layers.39.mamba.dt_bias": "model-00001-of-00002.safetensors",
372
+ "model.mamba_layers.39.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
373
  "model.mamba_layers.39.mamba.norm.weight": "model-00001-of-00002.safetensors",
374
  "model.mamba_layers.39.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
375
  "model.mamba_layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
378
  "model.mamba_layers.4.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
379
  "model.mamba_layers.4.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
380
  "model.mamba_layers.4.mamba.dt_bias": "model-00001-of-00002.safetensors",
381
+ "model.mamba_layers.4.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
382
  "model.mamba_layers.4.mamba.norm.weight": "model-00001-of-00002.safetensors",
383
  "model.mamba_layers.4.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
384
  "model.mamba_layers.40.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
387
  "model.mamba_layers.40.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
388
  "model.mamba_layers.40.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
389
  "model.mamba_layers.40.mamba.dt_bias": "model-00001-of-00002.safetensors",
390
+ "model.mamba_layers.40.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
391
  "model.mamba_layers.40.mamba.norm.weight": "model-00001-of-00002.safetensors",
392
  "model.mamba_layers.40.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
393
  "model.mamba_layers.41.input_layernorm.weight": "model-00001-of-00002.safetensors",
 
@@ -396,7 +396,7 @@
  "model.mamba_layers.41.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
  "model.mamba_layers.41.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.41.mamba.dt_bias": "model-00001-of-00002.safetensors",
+ "model.mamba_layers.41.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.41.mamba.norm.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.41.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.42.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -405,7 +405,7 @@
  "model.mamba_layers.42.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
  "model.mamba_layers.42.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.42.mamba.dt_bias": "model-00001-of-00002.safetensors",
+ "model.mamba_layers.42.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.42.mamba.norm.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.42.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.43.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -414,7 +414,7 @@
  "model.mamba_layers.43.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
  "model.mamba_layers.43.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.43.mamba.dt_bias": "model-00001-of-00002.safetensors",
+ "model.mamba_layers.43.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.43.mamba.norm.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.43.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.44.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -423,7 +423,7 @@
  "model.mamba_layers.44.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
  "model.mamba_layers.44.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.44.mamba.dt_bias": "model-00001-of-00002.safetensors",
+ "model.mamba_layers.44.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.44.mamba.norm.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.44.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.45.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -432,7 +432,7 @@
  "model.mamba_layers.45.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
  "model.mamba_layers.45.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.45.mamba.dt_bias": "model-00001-of-00002.safetensors",
+ "model.mamba_layers.45.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.45.mamba.norm.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.45.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.46.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -441,7 +441,7 @@
  "model.mamba_layers.46.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
  "model.mamba_layers.46.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.46.mamba.dt_bias": "model-00001-of-00002.safetensors",
+ "model.mamba_layers.46.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.46.mamba.norm.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.46.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.47.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -450,7 +450,7 @@
  "model.mamba_layers.47.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
  "model.mamba_layers.47.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.47.mamba.dt_bias": "model-00001-of-00002.safetensors",
+ "model.mamba_layers.47.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.47.mamba.norm.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.47.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.48.input_layernorm.weight": "model-00002-of-00002.safetensors",
@@ -459,7 +459,7 @@
  "model.mamba_layers.48.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
  "model.mamba_layers.48.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.48.mamba.dt_bias": "model-00001-of-00002.safetensors",
+ "model.mamba_layers.48.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.48.mamba.norm.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.48.mamba.out_proj.weight": "model-00002-of-00002.safetensors",
  "model.mamba_layers.49.input_layernorm.weight": "model-00002-of-00002.safetensors",
@@ -468,7 +468,7 @@
  "model.mamba_layers.49.mamba.conv1d.bias": "model-00002-of-00002.safetensors",
  "model.mamba_layers.49.mamba.conv1d.weight": "model-00002-of-00002.safetensors",
  "model.mamba_layers.49.mamba.dt_bias": "model-00002-of-00002.safetensors",
+ "model.mamba_layers.49.mamba.in_proj.weight": "model-00002-of-00002.safetensors",
  "model.mamba_layers.49.mamba.norm.weight": "model-00002-of-00002.safetensors",
  "model.mamba_layers.49.mamba.out_proj.weight": "model-00002-of-00002.safetensors",
  "model.mamba_layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -477,7 +477,7 @@
  "model.mamba_layers.5.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
  "model.mamba_layers.5.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.5.mamba.dt_bias": "model-00001-of-00002.safetensors",
+ "model.mamba_layers.5.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.5.mamba.norm.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.5.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.50.input_layernorm.weight": "model-00002-of-00002.safetensors",
@@ -486,7 +486,7 @@
  "model.mamba_layers.50.mamba.conv1d.bias": "model-00002-of-00002.safetensors",
  "model.mamba_layers.50.mamba.conv1d.weight": "model-00002-of-00002.safetensors",
  "model.mamba_layers.50.mamba.dt_bias": "model-00002-of-00002.safetensors",
+ "model.mamba_layers.50.mamba.in_proj.weight": "model-00002-of-00002.safetensors",
  "model.mamba_layers.50.mamba.norm.weight": "model-00002-of-00002.safetensors",
  "model.mamba_layers.50.mamba.out_proj.weight": "model-00002-of-00002.safetensors",
  "model.mamba_layers.51.input_layernorm.weight": "model-00002-of-00002.safetensors",
@@ -495,7 +495,7 @@
  "model.mamba_layers.51.mamba.conv1d.bias": "model-00002-of-00002.safetensors",
  "model.mamba_layers.51.mamba.conv1d.weight": "model-00002-of-00002.safetensors",
  "model.mamba_layers.51.mamba.dt_bias": "model-00002-of-00002.safetensors",
+ "model.mamba_layers.51.mamba.in_proj.weight": "model-00002-of-00002.safetensors",
  "model.mamba_layers.51.mamba.norm.weight": "model-00002-of-00002.safetensors",
  "model.mamba_layers.51.mamba.out_proj.weight": "model-00002-of-00002.safetensors",
  "model.mamba_layers.52.input_layernorm.weight": "model-00002-of-00002.safetensors",
@@ -504,7 +504,7 @@
  "model.mamba_layers.52.mamba.conv1d.bias": "model-00002-of-00002.safetensors",
  "model.mamba_layers.52.mamba.conv1d.weight": "model-00002-of-00002.safetensors",
  "model.mamba_layers.52.mamba.dt_bias": "model-00002-of-00002.safetensors",
+ "model.mamba_layers.52.mamba.in_proj.weight": "model-00002-of-00002.safetensors",
  "model.mamba_layers.52.mamba.norm.weight": "model-00002-of-00002.safetensors",
  "model.mamba_layers.52.mamba.out_proj.weight": "model-00002-of-00002.safetensors",
  "model.mamba_layers.53.input_layernorm.weight": "model-00002-of-00002.safetensors",
@@ -513,7 +513,7 @@
  "model.mamba_layers.53.mamba.conv1d.bias": "model-00002-of-00002.safetensors",
  "model.mamba_layers.53.mamba.conv1d.weight": "model-00002-of-00002.safetensors",
  "model.mamba_layers.53.mamba.dt_bias": "model-00002-of-00002.safetensors",
+ "model.mamba_layers.53.mamba.in_proj.weight": "model-00002-of-00002.safetensors",
  "model.mamba_layers.53.mamba.norm.weight": "model-00002-of-00002.safetensors",
  "model.mamba_layers.53.mamba.out_proj.weight": "model-00002-of-00002.safetensors",
  "model.mamba_layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -522,7 +522,7 @@
  "model.mamba_layers.6.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
  "model.mamba_layers.6.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.6.mamba.dt_bias": "model-00001-of-00002.safetensors",
+ "model.mamba_layers.6.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.6.mamba.norm.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.6.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -531,7 +531,7 @@
  "model.mamba_layers.7.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
  "model.mamba_layers.7.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.7.mamba.dt_bias": "model-00001-of-00002.safetensors",
+ "model.mamba_layers.7.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.7.mamba.norm.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.7.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -540,7 +540,7 @@
  "model.mamba_layers.8.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
  "model.mamba_layers.8.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.8.mamba.dt_bias": "model-00001-of-00002.safetensors",
+ "model.mamba_layers.8.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.8.mamba.norm.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.8.mamba.out_proj.weight": "model-00001-of-00002.safetensors",
  "model.mamba_layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
@@ -549,8 +549,8 @@
  "model.mamba_layers.9.mamba.conv1d.bias": "model-00001-of-00002.safetensors",
550
  "model.mamba_layers.9.mamba.conv1d.weight": "model-00001-of-00002.safetensors",
551
  "model.mamba_layers.9.mamba.dt_bias": "model-00001-of-00002.safetensors",
552
+ "model.mamba_layers.9.mamba.in_proj.weight": "model-00001-of-00002.safetensors",
553
  "model.mamba_layers.9.mamba.norm.weight": "model-00001-of-00002.safetensors",
554
  "model.mamba_layers.9.mamba.out_proj.weight": "model-00001-of-00002.safetensors"
555
  }
556
+ }