---
# mergekit merge configuration
# Passthrough merge: emit layers 0-41 of the single source model unchanged.
dtype: bfloat16
merge_method: passthrough
parameters:
  # Use an int8 mask during merging (mergekit parameter; 1.0 = enabled).
  int8_mask: 1.0
slices:
  - sources:
      # layer_range is [start, end) — layers 0 through 41 inclusive.
      - layer_range: [0, 42]
        model: merge/chubby10b+loras/Baldur-r128-LoRA