# source commit: 65f868f
---
# Mergekit passthrough (layer-stacking) merge of two Mistral-7B fine-tunes:
# Masterjp123/NeuralMaid-7b and lemonilia/AshhLimaRP-Mistral-7B.
# NOTE(review): adjacent slices repeat/overlap layer indices across the two
# models (e.g. [19, 31] followed by [23, 31]) — this is common in
# frankenmerges but should be confirmed as intentional.
dtype: float16
merge_method: passthrough
slices:
  - sources:
      - layer_range: [0, 8]
        model: Masterjp123/NeuralMaid-7b
  - sources:
      - layer_range: [0, 12]
        model: lemonilia/AshhLimaRP-Mistral-7B
  - sources:
      - layer_range: [9, 18]
        model: Masterjp123/NeuralMaid-7b
  - sources:
      - layer_range: [13, 22]
        model: lemonilia/AshhLimaRP-Mistral-7B
  - sources:
      - layer_range: [19, 31]
        model: Masterjp123/NeuralMaid-7b
  - sources:
      - layer_range: [23, 31]
        model: lemonilia/AshhLimaRP-Mistral-7B