JustinLin610 committed
Commit: 1f3785e
Parent(s): c5abbb1

update tokenizer_config.json, config.json, generation_config.json

- config.json +3 -3
- generation_config.json +1 -1
- tokenizer_config.json +1 -1
config.json
CHANGED
@@ -4,13 +4,13 @@
   ],
   "attention_dropout": 0.0,
   "bos_token_id": 151643,
-  "eos_token_id":
+  "eos_token_id": 151645,
   "hidden_act": "silu",
   "hidden_size": 896,
   "initializer_range": 0.02,
   "intermediate_size": 4864,
   "max_position_embeddings": 32768,
-  "max_window_layers":
+  "max_window_layers": 24,
   "model_type": "qwen2",
   "num_attention_heads": 14,
   "num_hidden_layers": 24,
@@ -44,7 +44,7 @@
   "sliding_window": 32768,
   "tie_word_embeddings": true,
   "torch_dtype": "float16",
-  "transformers_version": "4.
+  "transformers_version": "4.43.1",
   "use_cache": true,
   "use_sliding_window": false,
   "vocab_size": 151936
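As a quick sanity check, the updated config.json values can be read back with transformers' AutoConfig. This is a minimal sketch only; "./qwen2-checkout" is a placeholder for a local clone of this repo, which the commit page does not name.

from transformers import AutoConfig

# Placeholder path: point this at a local checkout containing the updated config.json.
config = AutoConfig.from_pretrained("./qwen2-checkout")

# Values written by this commit.
print(config.eos_token_id)       # 151645
print(config.max_window_layers)  # 24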
generation_config.json
CHANGED
@@ -10,5 +10,5 @@
   "temperature": 0.7,
   "top_k": 20,
   "top_p": 0.8,
-  "transformers_version": "4.
+  "transformers_version": "4.43.1"
 }
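The regenerated generation_config.json can be checked the same way with GenerationConfig (same placeholder path as above):

from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained("./qwen2-checkout")
# Sampling defaults are unchanged by this commit.
print(gen_config.temperature, gen_config.top_k, gen_config.top_p)  # 0.7 20 0.8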
tokenizer_config.json
CHANGED
@@ -199,7 +199,7 @@
   "clean_up_tokenization_spaces": false,
   "eos_token": "<|im_end|>",
   "errors": "replace",
-  "model_max_length":
+  "model_max_length": 32768,
   "pad_token": "<|endoftext|>",
   "split_special_tokens": false,
   "tokenizer_class": "Qwen2Tokenizer",
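For tokenizer_config.json, a minimal sketch confirming the new model_max_length alongside the unchanged special tokens (placeholder path again):

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("./qwen2-checkout")
print(tokenizer.model_max_length)  # 32768, set by this commit
print(tokenizer.eos_token)         # "<|im_end|>"
print(tokenizer.pad_token)         # "<|endoftext|>"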