base_model: meta-llama/Meta-Llama-3-8B-Instruct
dtype: bfloat16
merge_method: linear
parameters:
  int8_mask: 1.0
  normalize: 1.0
slices:
  - sources:
      - layer_range: [0, 4]
        model: lightblue/suzume-llama-3-8B-multilingual
        parameters:
          weight: 0.4149739730274144
      - layer_range: [0, 4]
        model: meta-llama/Meta-Llama-3-8B-Instruct
        parameters:
          weight: 0.6781276007090549
      - layer_range: [0, 4]
        model: aixsatoshi/Llama-3-youko-8b-instruct-chatvector
        parameters:
          weight: 0.34616999273932425
      - layer_range: [0, 4]
        model: shisa-ai/shisa-v1-llama3-8b
        parameters:
          weight: 1.3720042419649354
  - sources:
      - layer_range: [4, 8]
        model: lightblue/suzume-llama-3-8B-multilingual
        parameters:
          weight: 0.07652836818139683
      - layer_range: [4, 8]
        model: meta-llama/Meta-Llama-3-8B-Instruct
        parameters:
          weight: 1.234379009181979
      - layer_range: [4, 8]
        model: aixsatoshi/Llama-3-youko-8b-instruct-chatvector
        parameters:
          weight: 1.0146729889059811
      - layer_range: [4, 8]
        model: shisa-ai/shisa-v1-llama3-8b
        parameters:
          weight: 0.5811532109389872
  - sources:
      - layer_range: [8, 12]
        model: lightblue/suzume-llama-3-8B-multilingual
        parameters:
          weight: 0.5551700273906248
      - layer_range: [8, 12]
        model: meta-llama/Meta-Llama-3-8B-Instruct
        parameters:
          weight: 0.7418501521559635
      - layer_range: [8, 12]
        model: aixsatoshi/Llama-3-youko-8b-instruct-chatvector
        parameters:
          weight: 1.442504375594772
      - layer_range: [8, 12]
        model: shisa-ai/shisa-v1-llama3-8b
        parameters:
          weight: 0.6475631873316974
  - sources:
      - layer_range: [12, 16]
        model: lightblue/suzume-llama-3-8B-multilingual
        parameters:
          weight: 0.4227647782669271
      - layer_range: [12, 16]
        model: meta-llama/Meta-Llama-3-8B-Instruct
        parameters:
          weight: 1.2969869792284983
      - layer_range: [12, 16]
        model: aixsatoshi/Llama-3-youko-8b-instruct-chatvector
        parameters:
          weight: 0.7818773805802817
      - layer_range: [12, 16]
        model: shisa-ai/shisa-v1-llama3-8b
        parameters:
          weight: 0.8007371182560976
  - sources:
      - layer_range: [16, 20]
        model: lightblue/suzume-llama-3-8B-multilingual
        parameters:
          weight: 0.10979010874744283
      - layer_range: [16, 20]
        model: meta-llama/Meta-Llama-3-8B-Instruct
        parameters:
          weight: 0.19009547180175693
      - layer_range: [16, 20]
        model: aixsatoshi/Llama-3-youko-8b-instruct-chatvector
        parameters:
          weight: 0.6064294349661996
      - layer_range: [16, 20]
        model: shisa-ai/shisa-v1-llama3-8b
        parameters:
          weight: 0.7630087852386511
  - sources:
      - layer_range: [20, 24]
        model: lightblue/suzume-llama-3-8B-multilingual
        parameters:
          weight: 0.219671192433268
      - layer_range: [20, 24]
        model: meta-llama/Meta-Llama-3-8B-Instruct
        parameters:
          weight: 0.6303503074132494
      - layer_range: [20, 24]
        model: aixsatoshi/Llama-3-youko-8b-instruct-chatvector
        parameters:
          weight: 0.46265431269055757
      - layer_range: [20, 24]
        model: shisa-ai/shisa-v1-llama3-8b
        parameters:
          weight: 1.4662350856064592
  - sources:
      - layer_range: [24, 28]
        model: lightblue/suzume-llama-3-8B-multilingual
        parameters:
          weight: 0.1400550380200451
      - layer_range: [24, 28]
        model: meta-llama/Meta-Llama-3-8B-Instruct
        parameters:
          weight: 1.031570135674053
      - layer_range: [24, 28]
        model: aixsatoshi/Llama-3-youko-8b-instruct-chatvector
        parameters:
          weight: 0.5760956440228217
      - layer_range: [24, 28]
        model: shisa-ai/shisa-v1-llama3-8b
        parameters:
          weight: 1.5264012437679564
  - sources:
      - layer_range: [28, 32]
        model: lightblue/suzume-llama-3-8B-multilingual
        parameters:
          weight: 1.2311282964552015
      - layer_range: [28, 32]
        model: meta-llama/Meta-Llama-3-8B-Instruct
        parameters:
          weight: 0.43811773040605967
      - layer_range: [28, 32]
        model: aixsatoshi/Llama-3-youko-8b-instruct-chatvector
        parameters:
          weight: 0.5150682019605872
      - layer_range: [28, 32]
        model: shisa-ai/shisa-v1-llama3-8b
        parameters:
          weight: 0.342193342214983