SamaOkasha committed on
Commit 952ce3a
1 Parent(s): 3a7f730

Upload folder using huggingface_hub

Files changed (2)
  1. README.json +37 -0
  2. config.json +53 -0
README.json ADDED
@@ -0,0 +1,37 @@
+ ---
+ {}
+ ---
+ {
+ "license": "apache-2.0",
+ "tags": [
+ "merge",
+ "mergekit",
+ "lazymergekit",
+ "allenai/lama-large-7",
+ "allenai/lama-large-13"
+ ],
+ "model_name": "LaMa-Merged-7__13",
+ "model": "Your Model Information Here",
+ "description": "LaMa-Merged-7__13 is a merge of the following models using mergekit.",
+ "external_datasets": {},
+ "git_repo": "https://github.com/cg123/mergekit",
+ "website": "",
+ "tasks": [],
+ "languages": [],
+ "image": "",
+ "files": [
+ {
+ "filename": "config.json",
+ "type": "json",
+ "title": "Configuration"
+ }
+ ],
+ "framework": "",
+ "format": "json",
+ "references": [],
+ "contact": "",
+ "creation_date": "2022-03-21",
+ "authors": [
+ "SamaOkasha"
+ ]
+ }
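
If the merged weights are published alongside this model card, loading them could look like the minimal sketch below. The repo id `SamaOkasha/LaMa-Merged-7__13` is only inferred from the `model_name` and `authors` fields above, and a generic `AutoModel` head is used because this commit does not specify a task; both are assumptions, not part of the upload.

```python
# Minimal loading sketch, not part of the original commit.
# Assumptions: the merged model is pushed to the Hub as
# "SamaOkasha/LaMa-Merged-7__13" (inferred from the metadata above),
# and a generic AutoModel head is enough for inspection.
import torch
from transformers import AutoModel, AutoTokenizer

repo_id = "SamaOkasha/LaMa-Merged-7__13"  # assumed repo id

tokenizer = AutoTokenizer.from_pretrained(repo_id)
model = AutoModel.from_pretrained(repo_id, torch_dtype=torch.bfloat16)  # matches "dtype": "bfloat16" below

print(model.config.model_type, sum(p.numel() for p in model.parameters()))
```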
config.json ADDED
@@ -0,0 +1,53 @@
+ {
+ "slices": [
+ {
+ "sources": [
+ {
+ "model": "allenai/lama-large-7",
+ "layer_range": [
+ 0,
+ 6
+ ]
+ },
+ {
+ "model": "allenai/lama-large-13",
+ "layer_range": [
+ 7,
+ 13
+ ]
+ }
+ ]
+ }
+ ],
+ "merge_method": "slerp",
+ "base_model": "allenai/lama-large-7",
+ "parameters": {
+ "t": [
+ {
+ "filter": "self_attn",
+ "value": [
+ 0,
+ 0.5,
+ 0.3,
+ 0.7,
+ 1
+ ]
+ },
+ {
+ "filter": "mlp",
+ "value": [
+ 1,
+ 0.5,
+ 0.7,
+ 0.3,
+ 0
+ ]
+ },
+ {
+ "value": 0.5
+ }
+ ]
+ },
+ "dtype": "bfloat16",
+ "model_type": "led"
+ }
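
As a usage note, mergekit's `mergekit-yaml` entry point expects a YAML recipe, so this JSON config would typically be converted before running the merge. The sketch below is illustrative only: the file paths, the output directory `./LaMa-Merged-7__13`, and the conversion step are assumptions, while the slerp recipe itself is exactly the one committed above.

```python
# Illustrative sketch of running this recipe with mergekit (not part of the commit).
# Assumptions: mergekit and PyYAML are installed, config.json is in the working
# directory, and "./LaMa-Merged-7__13" is an arbitrary output path.
import json
import subprocess

import yaml

# mergekit-yaml consumes YAML, so convert the committed JSON recipe first.
with open("config.json") as f:
    merge_config = json.load(f)

with open("merge-config.yml", "w") as f:
    yaml.safe_dump(merge_config, f, sort_keys=False)

# Run the slerp merge defined above and copy the base model's tokenizer.
subprocess.run(
    ["mergekit-yaml", "merge-config.yml", "./LaMa-Merged-7__13", "--copy-tokenizer"],
    check=True,
)
```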