Dolph-Lund-Wizard-7B / mergekit_config.yml
merge_method: dare_ties
parameters:
  int8_mask: true
  t:
    - filter: self_attn
      value: [0, 0.5, 0.3, 0.7, 1]
    - filter: mlp
      value: [1, 0.5, 0.7, 0.3, 0]
    - value: 0.5 # fallback for rest of tensors
  embed_slerp: true
models:
  - model: /Users/etherops1/AI/Noodlz/Noodlz_DolphinLake-DARE_TIE_SLERP-tokenwest
    # No parameters necessary for base model
  - model: /Users/etherops1/AI/Not-WizardLM-2-7B
    parameters:
      density: 0.58
      weight: 0.4
base_model: /Users/etherops1/AI/Noodlz/Noodlz_DolphinLake-DARE_TIE_SLERP-tokenwest
tokenizer_source: model:/Users/etherops1/AI/Noodlz/Noodlz_DolphinLake-DARE_TIE_SLERP-tokenwest
dtype: bfloat16
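
For reference, a config like this can be executed with the mergekit-yaml CLI (mergekit-yaml mergekit_config.yml ./output-dir --cuda) or from Python. Below is a minimal sketch using mergekit's documented Python entry points (MergeConfiguration, MergeOptions, run_merge). The output directory name and the local filename mergekit_config.yml are assumptions for illustration, not part of this repo.

import yaml
import torch

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

CONFIG_YML = "mergekit_config.yml"   # assumed local copy of the config above
OUT_PATH = "./Dolph-Lund-Wizard-7B"  # placeholder output directory

# Parse and validate the YAML into a mergekit merge configuration
with open(CONFIG_YML, "r", encoding="utf-8") as fp:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(fp))

# Run the dare_ties merge described by the config
run_merge(
    merge_config,
    OUT_PATH,
    options=MergeOptions(
        cuda=torch.cuda.is_available(),  # merge on GPU if one is present
        copy_tokenizer=True,             # mirrors tokenizer_source in the config
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)

Note that the model paths in this config point to local directories on the author's machine; to reproduce the merge you would substitute your own local checkouts (or Hugging Face model IDs) for those paths.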