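# mergekit configuration for a DARE-TIES merge of Yi-34B-200K fine-tunes onto
# the chargoddard_Yi-34B-200K-Llama base. Per-model `weight` lists are
# layer-wise gradients: mergekit interpolates the six values across the layer
# stack, so the first entry dominates the earliest layers and the last entry
# the final layers. `density` is the fraction of each model's delta (relative
# to the base) retained before DARE rescaling.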
models:
  - model: /home/alpha/Storage/Models/Raw/chargoddard_Yi-34B-200K-Llama
    # No parameters necessary for base model
  - model: /home/alpha/Storage/Models/Raw/migtissera_Tess-34B-v1.4
    parameters:
      weight: [0.23, 0.125, 0.125, 0.125, 0.125, 0.125]
      density: 0.59
  - model: /home/alpha/Models/Raw/Mihaiii_Pallas-0.5
    parameters:
      weight: [0.23, 0.125, 0.125, 0.125, 0.125, 0.125]
      density: 0.59
  - model: /home/alpha/Storage/Models/Raw/bhenrym14_airoboros-3_1-yi-34b-200k
    parameters:
      weight: [0.02, 0.106, 0.106, 0.106, 0.106, 0.106]
      density: 0.59
  - model: /home/alpha/Storage/Models/Raw/jondurbin_bagel-34b-v0.2
    # Only the SFT version is in the main merge, since the DPO version seems to have no long-context ability at all
    parameters:
      weight: [0.02, 0.100, 0.100, 0.100, 0.100, 0.100]
      density: 0.4
  - model: /home/alpha/Storage/Models/Raw/kyujinpy_PlatYi-34B-200k-Q-FastChat
    parameters:
      weight: [0.02, 0.100, 0.100, 0.100, 0.100, 0.100]
      density: 0.59
  #- model: /home/alpha/Storage/Models/Raw/ehartford_dolphin-2.2-yi-34b-200k
  #  Dolphin 200K seems to underperform on multiple leaderboards and in perplexity tests, so it is excluded here
  #  parameters:
  #    weight: 0.15
  #    density: 0.6
  - model: /home/alpha/Models/Raw/adamo1139_Yi-34B-200K-AEZAKMI-v2
    parameters:
      weight: [0.02, 0.110, 0.110, 0.110, 0.110, 0.110]
      density: 0.59
  - model: /home/alpha/Storage/Models/Raw/Nous-Capybara-34B
    parameters:
      weight: [0.22, 0.126, 0.126, 0.126, 0.126, 0.126]
      density: 0.59
  - model: /home/alpha/Storage/Models/Raw/4kmerge
    parameters:
      weight: [0.02, 0.108, 0.108, 0.108, 0.108, 0.108]
      density: 0.5
  - model: /home/alpha/Models/Raw/migtissera_Tess-M-Creative-v1.0
    parameters:
      weight: [0.22, 0.100, 0.100, 0.100, 0.100, 0.100]
      density: 0.59
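# Global settings: dare_ties randomly drops each fine-tune's delta parameters
# down to the given density and rescales the survivors (DARE), then resolves
# sign conflicts across models TIES-style before adding the weighted deltas to
# the base model. tokenizer_source: union builds a tokenizer covering every
# input model's vocabulary, and int8_mask stores intermediate masks in 8-bit
# to reduce memory use.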
merge_method: dare_ties
tokenizer_source: union
base_model: /home/alpha/Storage/Models/Raw/chargoddard_Yi-34B-200K-Llama
parameters:
  int8_mask: true
dtype: bfloat16
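
# Usage sketch, assuming mergekit is installed and this file is saved as
# merge.yml (the output directory below is illustrative, not from the source):
#   mergekit-yaml merge.yml ./merged-model --cuda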