# mistral-patent-merge-test / mergekit_config.yml
# Uploaded by todenthal ("Upload 11 files", commit 45807b9 verified, 405 bytes).
---
# Mergekit SLERP merge: spherically interpolate a patent-instruct Mistral-7B
# with a Claude-chat Mistral-7B fine-tune across all 32 transformer layers.
# NOTE(review): indentation reconstructed to the canonical mergekit schema —
# the scraped original had all leading whitespace stripped.
base_model:
  model:
    path: Norquinal/Mistral-7B-claude-chat
dtype: float16
merge_method: slerp
parameters:
  t:
    # MLP tensors: five interpolation anchor points, spread across the
    # layer range (1.0 = fully the non-base model, 0.0 = fully the base).
    - filter: mlp
      value: [1.0, 0.5, 0.7, 0.3, 0.0]
    # All remaining tensors: constant 50/50 blend.
    - value: 0.5
slices:
  - sources:
      - layer_range: [0, 32]
        model:
          model:
            path: PatentPilotAI/mistral-7b-patent-instruct-v2
      - layer_range: [0, 32]
        model:
          model:
            path: Norquinal/Mistral-7B-claude-chat