---
# della_linear merge of five Qwen2.5-14B derivatives on a Veltha base.
# NOTE(review): source file had lost all line breaks (invalid single-line YAML);
# structure below is reconstructed from the key names and numbered comments.
name: Merged-14B-Ultimate
merge_method: della_linear
base_model: djuna/Q2.5-Veltha-14B
dtype: bfloat16

parameters:
  epsilon: 0.01    # Fine-grained parameter scaling for stable merges
  lambda: 1.5      # Emphasizes each model's unique parameters
  normalize: true  # Normalizes merges across different scale factors

# Weights sum to 0.75; with normalize: true they are rescaled at merge time.
models:
  # 1) Strong average + BBH + conversation
  - model: sthenno-com/miscii-14b-1225
    parameters:
      weight: 0.25
      density: 0.70
  # 2) CultriX "FinalMerge" synergy
  - model: CultriX/Qwen2.5-14B-FinalMerge
    parameters:
      weight: 0.15
      density: 0.65
  # 3) CultriX "Wernickev3" — balanced
  - model: CultriX/Qwen2.5-14B-Wernickev3
    parameters:
      weight: 0.15
      density: 0.65
  # 4) CultriX "Broca" — logic & QA
  - model: CultriX/Qwen2.5-14B-Broca
    parameters:
      weight: 0.10
      density: 0.65
  # 5) CultriX "SeQwence-14Bv1" — general coverage
  - model: CultriX/SeQwence-14Bv1
    parameters:
      weight: 0.10
      density: 0.65

adaptive_merge_parameters:
  # Weighted emphasis on sub-benchmarks
  task_weights:
    IFEval: 1.9
    BBH: 1.8
    MATH: 1.8
    GPQA: 1.7
    MUSR: 1.7
    MMLU-PRO: 1.7
  smoothing_factor: 0.1

# NOTE(review): flattened source did not show whether this key was nested under
# adaptive_merge_parameters; kept top-level per similar published configs — confirm.
gradient_clipping: 1.0  # Prevents over-contribution from any one model