```yaml
models:
  - model: chihoonlee10/T3Q-Mistral-Orca-Math-DPO  # No parameters necessary for base model
  - model: yam-peleg/Experiment21-7B
    parameters:
      density: 0.66
      weight: 0.2
  - model: CultriX/NeuralTrix-bf16
    parameters:
      density: 0.55
      weight: 0.2
  - model: louisgrc/Montebello_7B_SLERP
    parameters:
      density: 0.55
      weight: 0.2
  - model: CorticalStack/pastiche-crown-clown-7b-dare-dpo
    parameters:
      density: 0.44
      weight: 0.2
  - model: chihoonlee10/T3Q-Mistral-Orca-Math-DPO
    parameters:
      density: 0.66
      weight: 0.2
merge_method: dare_ties
base_model: chihoonlee10/T3Q-Mistral-Orca-Math-DPO
parameters:
  int8_mask: true
dtype: bfloat16
```
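To reproduce the merge, the configuration above can be fed to mergekit. The snippet below is a minimal sketch following mergekit's documented Python API; the file name `merge_config.yaml` and the output directory `./merged-model` are placeholders, not part of the original card.

```python
# Minimal sketch: run the dare_ties merge defined above with mergekit.
# Assumes `pip install mergekit` and that the YAML config is saved as
# merge_config.yaml; the output directory name is arbitrary.
import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Parse the YAML merge configuration into mergekit's config object.
with open("merge_config.yaml", "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Execute the merge and write the resulting model to disk.
run_merge(
    merge_config,
    out_path="./merged-model",           # where the merged weights are written
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # use a GPU if one is available
        copy_tokenizer=True,             # copy the base model's tokenizer alongside the weights
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)
```

Equivalently, the `mergekit-yaml merge_config.yaml ./merged-model` command-line entry point performs the same merge without writing any Python.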