# Mistral-4B / mergekit_config.yml
# Source: Hugging Face repo fhai50032/Mistral-4B
# Commit: "Upload folder using huggingface_hub (#1)" — 55cd9be (verified), 494 Bytes
# mergekit configuration: build a smaller model ("frankenmerge") by stacking
# selected layer slices of a single source model, fhai50032/RolePlayLake-7B.
dtype: bfloat16

# passthrough copies the selected layers verbatim — no weight blending
# between models (appropriate here since every slice uses the same source).
merge_method: passthrough

# Each entry appends one contiguous run of layers to the output model.
# NOTE(review): mergekit layer_range is half-open [start, end), so [0, 8]
# keeps layers 0-7. The gaps between slices (8-10, 12-14, 16-18, 20-23,
# 25-27) are intentionally dropped to shrink the 7B model.
slices:
  - sources:
      - layer_range: [0, 8]
        model: fhai50032/RolePlayLake-7B
  - sources:
      - layer_range: [11, 12]
        model: fhai50032/RolePlayLake-7B
  - sources:
      - layer_range: [15, 16]
        model: fhai50032/RolePlayLake-7B
  - sources:
      - layer_range: [19, 20]
        model: fhai50032/RolePlayLake-7B
  - sources:
      - layer_range: [24, 25]
        model: fhai50032/RolePlayLake-7B
  - sources:
      - layer_range: [28, 32]
        model: fhai50032/RolePlayLake-7B