SmerkyG committed
Commit 2c743b5
1 Parent(s): 2d157bd

Update config.json

Files changed (1)
  1. config.json +10 -3
config.json CHANGED
@@ -1,16 +1,23 @@
 {
-  "attention_hidden_size": 2560,
+  "architectures": [
+    "Rwkv6ForCausalLM"
+  ],
+  "auto_map": {
+    "AutoConfig": "configuration_rwkv6.Rwkv6Config",
+    "AutoModelForCausalLM": "modeling_rwkv6.Rwkv6ForCausalLM"
+  },
+  "attention_hidden_size": 2048,
   "bos_token_id": 0,
   "eos_token_id": 0,
   "head_size": 64,
   "head_size_divisor": 8,
-  "hidden_size": 2560,
+  "hidden_size": 2048,
   "intermediate_size": null,
   "layer_norm_epsilon": 1e-05,
   "max_context_length": 4096,
   "model_type": "rwkv6",
   "num_attention_heads": 64,
-  "num_hidden_layers": 32,
+  "num_hidden_layers": 24,
   "rescale_every": 6,
   "tie_word_embeddings": false,
   "transformers_version": "4.37.2",