---
# mergekit configuration: passthrough merge of a single LoRA-augmented model.
dtype: bfloat16
merge_method: passthrough
parameters:
  # Mask weights to int8 range during merge (mergekit option; 1.0 = enabled).
  int8_mask: 1.0
slices:
  - sources:
      # All 42 transformer layers of the source model, unchanged.
      - layer_range: [0, 42]
        model: merge/chubby10b+loras/Baldur-r128-LoRA