Crystalcareai committed on
Commit
63fd118
1 Parent(s): 8a4a739

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +4 -3
config.json CHANGED
@@ -1,12 +1,13 @@
1
  {
2
  "_name_or_path": "./commerge1",
3
  "architectures": [
4
- "GemmaForCausalLM"
5
  ],
6
  "attention_bias": false,
7
  "attention_dropout": 0.0,
8
  "auto_map": {
9
- "AutoModelForCausalLM": "Crystalcareai/Gemmoe-7b-pre--modeling_gemma.GemmaForCausalLM",
 
10
  "AutoTokenizer": "Crystalcareai/GemMoE-Beta-1--tokenization_gemmoe.GemmoeTokenizer"
11
  },
12
  "bos_token_id": 2,
@@ -17,7 +18,7 @@
17
  "initializer_range": 0.02,
18
  "intermediate_size": 24576,
19
  "max_position_embeddings": 8192,
20
- "model_type": "gemma",
21
  "num_attention_heads": 16,
22
  "num_hidden_layers": 28,
23
  "num_key_value_heads": 16,
 
1
  {
2
  "_name_or_path": "./commerge1",
3
  "architectures": [
4
+ "GemmoeForCausalLM"
5
  ],
6
  "attention_bias": false,
7
  "attention_dropout": 0.0,
8
  "auto_map": {
9
+ "AutoModelForCausalLM": "Crystalcareai/Gemmoe-7b-pre--modeling_gemmoe.GemmoeForCausalLM",
10
+ "AutoConfig": "Crystalcareai/Gemmoe-7b-pre--configuration_gemmoe.GemmoeConfig",
11
  "AutoTokenizer": "Crystalcareai/GemMoE-Beta-1--tokenization_gemmoe.GemmoeTokenizer"
12
  },
13
  "bos_token_id": 2,
 
18
  "initializer_range": 0.02,
19
  "intermediate_size": 24576,
20
  "max_position_embeddings": 8192,
21
+ "model_type": "gemmoe",
22
  "num_attention_heads": 16,
23
  "num_hidden_layers": 28,
24
  "num_key_value_heads": 16,