g-ronimo committed
Commit: 81a55c4
Parent: 0694e48

Upload config.json

Files changed (1): config.json (+2 −2)
config.json CHANGED
@@ -3,7 +3,7 @@
   "architectures": [
     "LlamaForCausalLM"
   ],
-  "attention_bias": false,
+  "attention_bias": true,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
   "eos_token_id": 128001,
@@ -12,7 +12,7 @@
   "initializer_range": 0.02,
   "intermediate_size": 14336,
   "max_position_embeddings": 8192,
-  "mlp_bias": false,
+  "mlp_bias": true,
   "model_type": "llama",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,