# File size: 565 Bytes
# Source revision: 14a80c5
# mergekit linear-merge recipe:
# keeps jukofyork/dark-miqu-70b for the middle layers [16, 64) and linearly
# blends in 152334H/miqu-1-70b-sf for the first 16 and last 16 layers.
# In the blended slices the dark-miqu weight is 0, so those layers come
# entirely from miqu-1-70b-sf; the top-level default weight of 1.0 applies
# to every model entry that does not override it.
merge_method: linear
parameters:
  weight: 1.0  # default per-model weight unless overridden below
slices:
  - sources:
      - model: 152334H/miqu-1-70b-sf
        layer_range: [0, 16]
      - model: jukofyork/dark-miqu-70b
        layer_range: [0, 16]
        parameters:
          weight: 0  # excluded from this slice; miqu-1-70b-sf provides layers 0-15
  - sources:
      - model: jukofyork/dark-miqu-70b
        layer_range: [16, 64]
  - sources:
      - model: 152334H/miqu-1-70b-sf
        layer_range: [64, 80]
      - model: jukofyork/dark-miqu-70b
        layer_range: [64, 80]
        parameters:
          weight: 0  # excluded from this slice; miqu-1-70b-sf provides layers 64-79
dtype: float16
tokenizer_source: model:miqu-1-70b-sf