dendimaki committed
Commit 2d7fe55
1 Parent(s): c4b6747

Upload config

Files changed (1)
  1. config.json +77 -61
config.json CHANGED
@@ -1,7 +1,7 @@
 {
-  "_name_or_path": "mistralai/Mistral-7B-v0.1",
+  "_name_or_path": "./mistralai/Mistral-7B-v0.1",
   "architectures": [
-    "MistralForSequenceClassification"
+    "MistralForCausalLM"
   ],
   "attention_dropout": 0.0,
   "bos_token_id": 1,
@@ -9,80 +9,96 @@
   "hidden_act": "silu",
   "hidden_size": 4096,
   "id2label": {
-    "0": "LABEL_0",
-    "1": "LABEL_1",
-    "2": "LABEL_2",
-    "3": "LABEL_3",
-    "4": "LABEL_4",
-    "5": "LABEL_5",
-    "6": "LABEL_6",
-    "7": "LABEL_7",
-    "8": "LABEL_8",
-    "9": "LABEL_9",
-    "10": "LABEL_10",
-    "11": "LABEL_11",
-    "12": "LABEL_12",
-    "13": "LABEL_13",
-    "14": "LABEL_14",
-    "15": "LABEL_15",
-    "16": "LABEL_16",
-    "17": "LABEL_17",
-    "18": "LABEL_18",
-    "19": "LABEL_19",
-    "20": "LABEL_20",
-    "21": "LABEL_21",
-    "22": "LABEL_22",
-    "23": "LABEL_23",
-    "24": "LABEL_24",
-    "25": "LABEL_25",
-    "26": "LABEL_26",
-    "27": "LABEL_27",
-    "28": "LABEL_28"
+    "0": "loc1lay1",
+    "1": "loc1lay2",
+    "2": "loc1lay3",
+    "3": "loc1lay4",
+    "4": "loc2lay1",
+    "5": "loc2lay2",
+    "6": "loc2lay3",
+    "7": "loc2lay4",
+    "8": "loc3lay1",
+    "9": "loc3lay2",
+    "10": "loc3lay3",
+    "11": "loc3lay4",
+    "12": "loc4lay1",
+    "13": "loc4lay2",
+    "14": "loc4lay3",
+    "15": "loc4lay4",
+    "16": "loc5+",
+    "17": "loc1",
+    "18": "loc2",
+    "19": "loc3",
+    "20": "loc4",
+    "21": "nfw",
+    "22": "tfw",
+    "23": "fwc",
+    "24": "fwp",
+    "25": "lay1",
+    "26": "lay2",
+    "27": "lay3",
+    "28": "lay4"
   },
   "initializer_range": 0.02,
   "intermediate_size": 14336,
   "label2id": {
-    "LABEL_0": 0,
-    "LABEL_1": 1,
-    "LABEL_10": 10,
-    "LABEL_11": 11,
-    "LABEL_12": 12,
-    "LABEL_13": 13,
-    "LABEL_14": 14,
-    "LABEL_15": 15,
-    "LABEL_16": 16,
-    "LABEL_17": 17,
-    "LABEL_18": 18,
-    "LABEL_19": 19,
-    "LABEL_2": 2,
-    "LABEL_20": 20,
-    "LABEL_21": 21,
-    "LABEL_22": 22,
-    "LABEL_23": 23,
-    "LABEL_24": 24,
-    "LABEL_25": 25,
-    "LABEL_26": 26,
-    "LABEL_27": 27,
-    "LABEL_28": 28,
-    "LABEL_3": 3,
-    "LABEL_4": 4,
-    "LABEL_5": 5,
-    "LABEL_6": 6,
-    "LABEL_7": 7,
-    "LABEL_8": 8,
-    "LABEL_9": 9
+    "fwc": 23,
+    "fwp": 24,
+    "lay1": 25,
+    "lay2": 26,
+    "lay3": 27,
+    "lay4": 28,
+    "loc1": 17,
+    "loc1lay1": 0,
+    "loc1lay2": 1,
+    "loc1lay3": 2,
+    "loc1lay4": 3,
+    "loc2": 18,
+    "loc2lay1": 4,
+    "loc2lay2": 5,
+    "loc2lay3": 6,
+    "loc2lay4": 7,
+    "loc3": 19,
+    "loc3lay1": 8,
+    "loc3lay2": 9,
+    "loc3lay3": 10,
+    "loc3lay4": 11,
+    "loc4": 20,
+    "loc4lay1": 12,
+    "loc4lay2": 13,
+    "loc4lay3": 14,
+    "loc4lay4": 15,
+    "loc5+": 16,
+    "nfw": 21,
+    "tfw": 22
   },
   "max_position_embeddings": 32768,
   "model_type": "mistral",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "num_key_value_heads": 8,
+  "pad_token_id": 2,
+  "quantization_config": {
+    "_load_in_4bit": true,
+    "_load_in_8bit": false,
+    "bnb_4bit_compute_dtype": "bfloat16",
+    "bnb_4bit_quant_storage": "uint8",
+    "bnb_4bit_quant_type": "nf4",
+    "bnb_4bit_use_double_quant": true,
+    "llm_int8_enable_fp32_cpu_offload": false,
+    "llm_int8_has_fp16_weight": false,
+    "llm_int8_skip_modules": null,
+    "llm_int8_threshold": 6.0,
+    "load_in_4bit": true,
+    "load_in_8bit": false,
+    "quant_method": "bitsandbytes"
+  },
   "rms_norm_eps": 1e-05,
   "rope_theta": 10000.0,
   "sliding_window": 4096,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
   "transformers_version": "4.42.0.dev0",
-  "use_cache": true,
+  "use_cache": false,
   "vocab_size": 32000
 }
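For reference, the id2label / label2id blocks are what transformers serializes into config.json when a model is created with explicit class names. Note that the new config lists MistralForCausalLM in architectures even though the label maps are classification-style, so the sketch below reflects a typical classification setup, not necessarily the author's exact pipeline; passing the mappings through from_pretrained is an assumption, with the label names and pad_token_id taken verbatim from the diff.

```python
from transformers import AutoModelForSequenceClassification

# Class names copied verbatim from the new config.json, in id order 0..28.
labels = [
    "loc1lay1", "loc1lay2", "loc1lay3", "loc1lay4",
    "loc2lay1", "loc2lay2", "loc2lay3", "loc2lay4",
    "loc3lay1", "loc3lay2", "loc3lay3", "loc3lay4",
    "loc4lay1", "loc4lay2", "loc4lay3", "loc4lay4",
    "loc5+", "loc1", "loc2", "loc3", "loc4",
    "nfw", "tfw", "fwc", "fwp",
    "lay1", "lay2", "lay3", "lay4",
]
id2label = {i: name for i, name in enumerate(labels)}
label2id = {name: i for i, name in enumerate(labels)}

# Config overrides passed as kwargs are written back out on save_pretrained,
# which produces id2label / label2id blocks like the ones in this diff.
model = AutoModelForSequenceClassification.from_pretrained(
    "mistralai/Mistral-7B-v0.1",
    num_labels=len(labels),
    id2label=id2label,
    label2id=label2id,
    pad_token_id=2,  # matches the pad_token_id added in this commit
)
```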
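The added quantization_config block is the serialized form of a bitsandbytes 4-bit NF4 setup, and the switch to "use_cache": false is consistent with a training-time configuration, where the KV cache is usually disabled alongside gradient checkpointing. A minimal sketch of the equivalent BitsAndBytesConfig at load time; the device_map choice is an assumption:

```python
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

# Mirrors the quantization_config block added in this commit:
# 4-bit NF4 weights, nested (double) quantization, bfloat16 compute.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_use_double_quant=True,
    bnb_4bit_compute_dtype=torch.bfloat16,
)

model = AutoModelForCausalLM.from_pretrained(
    "mistralai/Mistral-7B-v0.1",  # base checkpoint named in the config
    quantization_config=bnb_config,
    device_map="auto",  # assumption; any valid device placement works
)
```

Loading with a quantization_config like this is what causes transformers to persist the block into config.json on save, which matches the fields added here.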