---
# mergekit configuration: linear merge over layer slices of Mistral-7B-Instruct-v0.2
dtype: float16
merge_method: linear
slices:
  - sources:
      - layer_range: [0, 8]
        model: mistralai/Mistral-7B-Instruct-v0.2
        parameters:
          weight: 0.33
      - layer_range: [8, 16]
        model: mistralai/Mistral-7B-Instruct-v0.2
        parameters:
          weight: 0.33
      - layer_range: [16, 24]
        model: mistralai/Mistral-7B-Instruct-v0.2
        parameters:
          weight: 0.25
      - layer_range: [24, 32]
        model: mistralai/Mistral-7B-Instruct-v0.2
        parameters:
          weight: 0.09