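# mergekit passthrough ("frankenmerge") config: stacks five overlapping
# 10-layer slices of NousResearch/Hermes-2-Pro-Llama-3-8B, giving the
# merged model 50 transformer layers instead of the base model's 32.
# No weights are blended; each slice is copied verbatim, so layers in
# the overlap regions (e.g. 7-9, 13-16) appear in the stack twice.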
slices:
  - sources:
      - model: "NousResearch/Hermes-2-Pro-Llama-3-8B"
        layer_range: [0, 10]
  - sources:
      - model: "NousResearch/Hermes-2-Pro-Llama-3-8B"
        layer_range: [7, 17]
  - sources:
      - model: "NousResearch/Hermes-2-Pro-Llama-3-8B"
        layer_range: [13, 23]
  - sources:
      - model: "NousResearch/Hermes-2-Pro-Llama-3-8B"
        layer_range: [18, 28]
  - sources:
      - model: "NousResearch/Hermes-2-Pro-Llama-3-8B"
        layer_range: [22, 32]

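# passthrough copies tensors as-is rather than interpolating them, and
# dtype casts the output weights to bfloat16. Every slice comes from the
# same model, so this is a pure self-stack (depth up-scaling).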
merge_method: passthrough
base_model: "NousResearch/Hermes-2-Pro-Llama-3-8B"
dtype: bfloat16
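
# A minimal usage sketch, assuming mergekit is installed
# (https://github.com/arcee-ai/mergekit); the config filename and output
# directory below are illustrative, not part of this repo:
#   mergekit-yaml config.yml ./Hermes-2-Pro-Llama-3-8B-50L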